Commit 529a1ac4 authored by Nguyen Huy Nhat Anh

init new framework

// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 50;
objects = {
/* Begin PBXBuildFile section */
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 95182D0524B3343D00405EA9 /* liveness.tflite */; };
9546DDB5247D171500AF50DE /* ExtString.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDB4247D171500AF50DE /* ExtString.swift */; };
9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDC2247D1FA200AF50DE /* Localizable.strings */; };
9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */; };
9546DDDC247E197800AF50DE /* Global.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9546DDDB247E197800AF50DE /* Global.swift */; };
9551057C2477746A0053036F /* OCR_SDK.h in Headers */ = {isa = PBXBuildFile; fileRef = 9551057A2477746A0053036F /* OCR_SDK.h */; settings = {ATTRIBUTES = (Public, ); }; };
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510583247774CC0053036F /* SBOCRRequest.swift */; };
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510585247774CC0053036F /* ExtUiViewController.swift */; };
955105AD247774CC0053036F /* Loadding.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510586247774CC0053036F /* Loadding.swift */; };
955105AE247774CC0053036F /* TutorialFace1.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510588247774CC0053036F /* TutorialFace1.png */; };
955105AF247774CC0053036F /* TutorialFaceP.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510589247774CC0053036F /* TutorialFaceP.png */; };
955105B0247774CC0053036F /* TutorialFace3.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058A247774CC0053036F /* TutorialFace3.png */; };
955105B1247774CC0053036F /* TutorialFace2.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058B247774CC0053036F /* TutorialFace2.png */; };
955105B3247774CC0053036F /* TutorialFaceCheckBox.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058D247774CC0053036F /* TutorialFaceCheckBox.png */; };
955105B4247774CC0053036F /* background.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058E247774CC0053036F /* background.png */; };
955105B5247774CC0053036F /* iconCap.png in Resources */ = {isa = PBXBuildFile; fileRef = 9551058F247774CC0053036F /* iconCap.png */; };
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */; };
955105B9247774CC0053036F /* cmndF1.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510593247774CC0053036F /* cmndF1.png */; };
955105BB247774CC0053036F /* cmndF2.png in Resources */ = {isa = PBXBuildFile; fileRef = 95510595247774CC0053036F /* cmndF2.png */; };
955105BC247774CC0053036F /* SBKTutorialFaceVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 95510598247774CC0053036F /* SBKTutorialFaceVC.xib */; };
955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95510599247774CC0053036F /* SBKTutorialFaceVC.swift */; };
955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9551059B247774CC0053036F /* SBKTutorialVC.swift */; };
955105BF247774CC0053036F /* SBKTutorialVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9551059C247774CC0053036F /* SBKTutorialVC.xib */; };
955105C1247774CC0053036F /* SBKResultCapture.xib in Resources */ = {isa = PBXBuildFile; fileRef = 955105A0247774CC0053036F /* SBKResultCapture.xib */; };
955105C2247774CC0053036F /* SBKResultCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105A1247774CC0053036F /* SBKResultCapture.swift */; };
955105C3247774CC0053036F /* SBKResultFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105A3247774CC0053036F /* SBKResultFaceVC.swift */; };
955105C4247774CC0053036F /* SBKResultFaceVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 955105A4247774CC0053036F /* SBKResultFaceVC.xib */; };
955105C5247774CC0053036F /* SBKCaptureFaceVC.xib in Resources */ = {isa = PBXBuildFile; fileRef = 955105A6247774CC0053036F /* SBKCaptureFaceVC.xib */; };
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */; };
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105AA247774CC0053036F /* SBKCaptureCardVC.swift */; };
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955105C9247775290053036F /* SB_KYC_SDK.swift */; };
955105FA2477B52C0053036F /* back.png in Resources */ = {isa = PBXBuildFile; fileRef = 955105F92477B52C0053036F /* back.png */; };
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955BEC4B249083A1001FB052 /* SBValidateInput.swift */; };
955BEC4E249098C2001FB052 /* ExtUIColor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 955BEC4D249098C2001FB052 /* ExtUIColor.swift */; };
955BECE624935A14001FB052 /* ic_record.png in Resources */ = {isa = PBXBuildFile; fileRef = 955BECE524935A14001FB052 /* ic_record.png */; };
955E7AC924D957140048FC06 /* Next@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AC824D957140048FC06 /* Next@2x.png */; };
955E7ADF24D967B20048FC06 /* Card-2@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7ADE24D967B10048FC06 /* Card-2@2x.png */; };
955E7AE124D967BD0048FC06 /* Car-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */; };
955E7AE324D967CE0048FC06 /* Passport-2 copy@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */; };
956BB56E24DBB9B7000C88D2 /* Back@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB56D24DBB9B7000C88D2 /* Back@2x.png */; };
956BB5AF24DCFFB2000C88D2 /* Hat@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */; };
956BB5B024DCFFB2000C88D2 /* Glasses@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */; };
956BB5B124DCFFB2000C88D2 /* Brighness@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */; };
956BB5B224DCFFB2000C88D2 /* Holdphone@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */; };
956BB5BB24DD31F7000C88D2 /* Scan-1@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */; };
956BB5BC24DD31F7000C88D2 /* Scan-3@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */; };
956BB5BD24DD31F7000C88D2 /* Scan-4@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */; };
956BB5BE24DD31F7000C88D2 /* Scan-5@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */; };
957DF5F324C035C700FE6A67 /* objcio.cer in Resources */ = {isa = PBXBuildFile; fileRef = 957DF5F224C035C700FE6A67 /* objcio.cer */; };
9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9580130D2489F1EA00846F8A /* SBKRecordFace.swift */; };
958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */ = {isa = PBXBuildFile; fileRef = 9580130E2489F1EA00846F8A /* SBKRecordFace.xib */; };
95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801346248A237000846F8A /* SBKModelDataHandler.swift */; };
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */; };
958D36C224C18BB1004B27EB /* Pods_OCR_SDK.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
95A6BA6224E1627800A34ADD /* back_light.png in Resources */ = {isa = PBXBuildFile; fileRef = 95A6BA6124E1627800A34ADD /* back_light.png */; };
95FAB20B24986B9600CE7913 /* valid_idcard.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 95FAB20A24986B9600CE7913 /* valid_idcard.tflite */; };
95FAB2672499C89400CE7913 /* rotate.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAB2662499C89400CE7913 /* rotate.png */; };
95FAF51E24EA3FE300C161F2 /* Caution@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF51D24EA3FE300C161F2 /* Caution@2x.png */; };
95FAF52024EA3FEE00C161F2 /* Button_Do@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */; };
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56B24EA83C800C161F2 /* Place within the box.png */; };
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56C24EA83C900C161F2 /* Avoid glare.png */; };
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */ = {isa = PBXBuildFile; fileRef = 95FAF56D24EA83C900C161F2 /* Do not place outside.png */; };
CCCF85EB83511B97EF23244B /* Pods_OCR_SDK.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
958D36C324C18BB1004B27EB /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
958D36C224C18BB1004B27EB /* Pods_OCR_SDK.framework in Embed Frameworks */,
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.debug.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.debug.xcconfig"; sourceTree = "<group>"; };
3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OCR-SDK.release.xcconfig"; path = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
95182D0524B3343D00405EA9 /* liveness.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = liveness.tflite; sourceTree = "<group>"; };
9546DDB4247D171500AF50DE /* ExtString.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtString.swift; sourceTree = "<group>"; };
9546DDC1247D1FA200AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Localizable.strings; sourceTree = "<group>"; };
9546DDC3247D1FAA00AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/Localizable.strings; sourceTree = "<group>"; };
9546DDD1247D2C0C00AF50DE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/SBKCaptureCardVC.xib; sourceTree = "<group>"; };
9546DDD4247D2C1700AF50DE /* vi */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = vi; path = vi.lproj/SBKCaptureCardVC.strings; sourceTree = "<group>"; };
9546DDD6247D2C1A00AF50DE /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/SBKCaptureCardVC.strings; sourceTree = "<group>"; };
9546DDDB247E197800AF50DE /* Global.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Global.swift; sourceTree = "<group>"; };
955105772477746A0053036F /* SB_KYC_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SB_KYC_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
9551057A2477746A0053036F /* OCR_SDK.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = OCR_SDK.h; sourceTree = "<group>"; };
9551057B2477746A0053036F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
95510583247774CC0053036F /* SBOCRRequest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBOCRRequest.swift; sourceTree = "<group>"; };
95510585247774CC0053036F /* ExtUiViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExtUiViewController.swift; sourceTree = "<group>"; };
95510586247774CC0053036F /* Loadding.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Loadding.swift; sourceTree = "<group>"; };
95510588247774CC0053036F /* TutorialFace1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace1.png; sourceTree = "<group>"; };
95510589247774CC0053036F /* TutorialFaceP.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceP.png; sourceTree = "<group>"; };
9551058A247774CC0053036F /* TutorialFace3.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace3.png; sourceTree = "<group>"; };
9551058B247774CC0053036F /* TutorialFace2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFace2.png; sourceTree = "<group>"; };
9551058D247774CC0053036F /* TutorialFaceCheckBox.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = TutorialFaceCheckBox.png; sourceTree = "<group>"; };
9551058E247774CC0053036F /* background.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = background.png; sourceTree = "<group>"; };
9551058F247774CC0053036F /* iconCap.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = iconCap.png; sourceTree = "<group>"; };
95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Screen Shot 2020-05-12 at 15.14.44.png"; sourceTree = "<group>"; };
95510593247774CC0053036F /* cmndF1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF1.png; sourceTree = "<group>"; };
95510595247774CC0053036F /* cmndF2.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = cmndF2.png; sourceTree = "<group>"; };
95510598247774CC0053036F /* SBKTutorialFaceVC.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKTutorialFaceVC.xib; sourceTree = "<group>"; };
95510599247774CC0053036F /* SBKTutorialFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKTutorialFaceVC.swift; sourceTree = "<group>"; };
9551059B247774CC0053036F /* SBKTutorialVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKTutorialVC.swift; sourceTree = "<group>"; };
9551059C247774CC0053036F /* SBKTutorialVC.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKTutorialVC.xib; sourceTree = "<group>"; };
955105A0247774CC0053036F /* SBKResultCapture.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKResultCapture.xib; sourceTree = "<group>"; };
955105A1247774CC0053036F /* SBKResultCapture.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKResultCapture.swift; sourceTree = "<group>"; };
955105A3247774CC0053036F /* SBKResultFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKResultFaceVC.swift; sourceTree = "<group>"; };
955105A4247774CC0053036F /* SBKResultFaceVC.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKResultFaceVC.xib; sourceTree = "<group>"; };
955105A6247774CC0053036F /* SBKCaptureFaceVC.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = SBKCaptureFaceVC.xib; sourceTree = "<group>"; };
955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKCaptureFaceVC.swift; sourceTree = "<group>"; };
955105AA247774CC0053036F /* SBKCaptureCardVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKCaptureCardVC.swift; sourceTree = "<group>"; };
955105C9247775290053036F /* SB_KYC_SDK.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SB_KYC_SDK.swift; sourceTree = "<group>"; };
955105F92477B52C0053036F /* back.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back.png; sourceTree = "<group>"; };
955BEC4B249083A1001FB052 /* SBValidateInput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SBValidateInput.swift; sourceTree = "<group>"; };
955BEC4D249098C2001FB052 /* ExtUIColor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExtUIColor.swift; sourceTree = "<group>"; };
955BECE524935A14001FB052 /* ic_record.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_record.png; sourceTree = "<group>"; };
955E7AC824D957140048FC06 /* Next@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Next@2x.png"; sourceTree = "<group>"; };
955E7ADE24D967B10048FC06 /* Card-2@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Card-2@2x.png"; sourceTree = "<group>"; };
955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Car-2 copy@2x.png"; sourceTree = "<group>"; };
955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Passport-2 copy@2x.png"; sourceTree = "<group>"; };
956BB56D24DBB9B7000C88D2 /* Back@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Back@2x.png"; sourceTree = "<group>"; };
956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Hat@2x.png"; sourceTree = "<group>"; };
956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Glasses@2x.png"; sourceTree = "<group>"; };
956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Brighness@2x.png"; sourceTree = "<group>"; };
956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Holdphone@2x.png"; sourceTree = "<group>"; };
956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-1@2x.png"; sourceTree = "<group>"; };
956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-3@2x.png"; sourceTree = "<group>"; };
956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-4@2x.png"; sourceTree = "<group>"; };
956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Scan-5@2x.png"; sourceTree = "<group>"; };
957DF5F224C035C700FE6A67 /* objcio.cer */ = {isa = PBXFileReference; lastKnownFileType = file; path = objcio.cer; sourceTree = "<group>"; };
9580130D2489F1EA00846F8A /* SBKRecordFace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SBKRecordFace.swift; sourceTree = "<group>"; };
9580130E2489F1EA00846F8A /* SBKRecordFace.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = SBKRecordFace.xib; sourceTree = "<group>"; };
95801346248A237000846F8A /* SBKModelDataHandler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SBKModelDataHandler.swift; sourceTree = "<group>"; };
95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CVPixelBufferExtension.swift; sourceTree = "<group>"; };
95A6BA6124E1627800A34ADD /* back_light.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = back_light.png; sourceTree = "<group>"; };
95FAB20A24986B9600CE7913 /* valid_idcard.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = valid_idcard.tflite; sourceTree = "<group>"; };
95FAB2662499C89400CE7913 /* rotate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = rotate.png; sourceTree = "<group>"; };
95FAF51D24EA3FE300C161F2 /* Caution@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Caution@2x.png"; sourceTree = "<group>"; };
95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Button_Do@2x.png"; sourceTree = "<group>"; };
95FAF56B24EA83C800C161F2 /* Place within the box.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Place within the box.png"; sourceTree = "<group>"; };
95FAF56C24EA83C900C161F2 /* Avoid glare.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Avoid glare.png"; sourceTree = "<group>"; };
95FAF56D24EA83C900C161F2 /* Do not place outside.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Do not place outside.png"; sourceTree = "<group>"; };
B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OCR_SDK.framework; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
955105742477746A0053036F /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
CCCF85EB83511B97EF23244B /* Pods_OCR_SDK.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
6728428CA4AB34E672571668 /* Pods */ = {
isa = PBXGroup;
children = (
2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */,
8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */,
);
path = Pods;
sourceTree = "<group>";
};
9526FFB224C8176300FF24D5 /* Recovered References */ = {
isa = PBXGroup;
children = (
3B6B0136F5F5B4ED2341A91B /* Pods_OCR_SDK.framework */,
);
name = "Recovered References";
sourceTree = "<group>";
};
9551056D2477746A0053036F = {
isa = PBXGroup;
children = (
95FAF56C24EA83C900C161F2 /* Avoid glare.png */,
95FAF56D24EA83C900C161F2 /* Do not place outside.png */,
95FAF56B24EA83C800C161F2 /* Place within the box.png */,
95FAF51F24EA3FEE00C161F2 /* Button_Do@2x.png */,
95FAF51D24EA3FE300C161F2 /* Caution@2x.png */,
95A6BA6124E1627800A34ADD /* back_light.png */,
956BB5B724DD31F6000C88D2 /* Scan-1@2x.png */,
956BB5B824DD31F6000C88D2 /* Scan-3@2x.png */,
956BB5B924DD31F7000C88D2 /* Scan-4@2x.png */,
956BB5BA24DD31F7000C88D2 /* Scan-5@2x.png */,
956BB5AD24DCFFB1000C88D2 /* Brighness@2x.png */,
956BB5AC24DCFFB1000C88D2 /* Glasses@2x.png */,
956BB5AB24DCFFB1000C88D2 /* Hat@2x.png */,
956BB5AE24DCFFB2000C88D2 /* Holdphone@2x.png */,
955105792477746A0053036F /* OCR-SDK */,
955105782477746A0053036F /* Products */,
6728428CA4AB34E672571668 /* Pods */,
BF236FF2605D4B46583CACB8 /* Frameworks */,
9526FFB224C8176300FF24D5 /* Recovered References */,
);
sourceTree = "<group>";
};
955105782477746A0053036F /* Products */ = {
isa = PBXGroup;
children = (
955105772477746A0053036F /* SB_KYC_SDK.framework */,
);
name = Products;
sourceTree = "<group>";
};
955105792477746A0053036F /* OCR-SDK */ = {
isa = PBXGroup;
children = (
957DF5F824C036E100FE6A67 /* FileSSL */,
9580134A248A25E700846F8A /* Model */,
95801345248A237000846F8A /* ModelDataHandler */,
95510582247774CC0053036F /* service */,
95510596247774CC0053036F /* UI */,
95510584247774CC0053036F /* Utils */,
9551057A2477746A0053036F /* OCR_SDK.h */,
9551057B2477746A0053036F /* Info.plist */,
955105C9247775290053036F /* SB_KYC_SDK.swift */,
9546DDC2247D1FA200AF50DE /* Localizable.strings */,
);
path = "OCR-SDK";
sourceTree = "<group>";
};
95510582247774CC0053036F /* service */ = {
isa = PBXGroup;
children = (
95510583247774CC0053036F /* SBOCRRequest.swift */,
);
path = service;
sourceTree = "<group>";
};
95510584247774CC0053036F /* Utils */ = {
isa = PBXGroup;
children = (
95801348248A25BC00846F8A /* CVPixelBufferExtension.swift */,
95510585247774CC0053036F /* ExtUiViewController.swift */,
95510586247774CC0053036F /* Loadding.swift */,
95510587247774CC0053036F /* image */,
9546DDB4247D171500AF50DE /* ExtString.swift */,
9546DDDB247E197800AF50DE /* Global.swift */,
955BEC4B249083A1001FB052 /* SBValidateInput.swift */,
955BEC4D249098C2001FB052 /* ExtUIColor.swift */,
);
path = Utils;
sourceTree = "<group>";
};
95510587247774CC0053036F /* image */ = {
isa = PBXGroup;
children = (
956BB56D24DBB9B7000C88D2 /* Back@2x.png */,
955E7AE224D967CE0048FC06 /* Passport-2 copy@2x.png */,
955E7AE024D967BD0048FC06 /* Car-2 copy@2x.png */,
955E7ADE24D967B10048FC06 /* Card-2@2x.png */,
955E7AC824D957140048FC06 /* Next@2x.png */,
95FAB2662499C89400CE7913 /* rotate.png */,
955BECE524935A14001FB052 /* ic_record.png */,
955105F92477B52C0053036F /* back.png */,
95510588247774CC0053036F /* TutorialFace1.png */,
95510589247774CC0053036F /* TutorialFaceP.png */,
9551058A247774CC0053036F /* TutorialFace3.png */,
9551058B247774CC0053036F /* TutorialFace2.png */,
9551058D247774CC0053036F /* TutorialFaceCheckBox.png */,
9551058E247774CC0053036F /* background.png */,
9551058F247774CC0053036F /* iconCap.png */,
95510590247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png */,
95510593247774CC0053036F /* cmndF1.png */,
95510595247774CC0053036F /* cmndF2.png */,
);
path = image;
sourceTree = "<group>";
};
95510596247774CC0053036F /* UI */ = {
isa = PBXGroup;
children = (
9580130C2489F1C100846F8A /* SBKRecordFace */,
95510597247774CC0053036F /* SBKTutorialFace */,
9551059A247774CC0053036F /* SBKTutorial */,
9551059F247774CC0053036F /* SBKShowDataPhoto */,
955105A2247774CC0053036F /* SBKShowDataFace */,
955105A5247774CC0053036F /* SBKCaptureFace */,
955105A8247774CC0053036F /* SBKCaptureCard */,
);
path = UI;
sourceTree = "<group>";
};
95510597247774CC0053036F /* SBKTutorialFace */ = {
isa = PBXGroup;
children = (
95510598247774CC0053036F /* SBKTutorialFaceVC.xib */,
95510599247774CC0053036F /* SBKTutorialFaceVC.swift */,
);
path = SBKTutorialFace;
sourceTree = "<group>";
};
9551059A247774CC0053036F /* SBKTutorial */ = {
isa = PBXGroup;
children = (
9551059B247774CC0053036F /* SBKTutorialVC.swift */,
9551059C247774CC0053036F /* SBKTutorialVC.xib */,
);
path = SBKTutorial;
sourceTree = "<group>";
};
9551059F247774CC0053036F /* SBKShowDataPhoto */ = {
isa = PBXGroup;
children = (
955105A0247774CC0053036F /* SBKResultCapture.xib */,
955105A1247774CC0053036F /* SBKResultCapture.swift */,
);
path = SBKShowDataPhoto;
sourceTree = "<group>";
};
955105A2247774CC0053036F /* SBKShowDataFace */ = {
isa = PBXGroup;
children = (
955105A3247774CC0053036F /* SBKResultFaceVC.swift */,
955105A4247774CC0053036F /* SBKResultFaceVC.xib */,
);
path = SBKShowDataFace;
sourceTree = "<group>";
};
955105A5247774CC0053036F /* SBKCaptureFace */ = {
isa = PBXGroup;
children = (
955105A6247774CC0053036F /* SBKCaptureFaceVC.xib */,
955105A7247774CC0053036F /* SBKCaptureFaceVC.swift */,
);
path = SBKCaptureFace;
sourceTree = "<group>";
};
955105A8247774CC0053036F /* SBKCaptureCard */ = {
isa = PBXGroup;
children = (
9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */,
955105AA247774CC0053036F /* SBKCaptureCardVC.swift */,
);
path = SBKCaptureCard;
sourceTree = "<group>";
};
957DF5F824C036E100FE6A67 /* FileSSL */ = {
isa = PBXGroup;
children = (
957DF5F224C035C700FE6A67 /* objcio.cer */,
);
path = FileSSL;
sourceTree = "<group>";
};
9580130C2489F1C100846F8A /* SBKRecordFace */ = {
isa = PBXGroup;
children = (
9580130D2489F1EA00846F8A /* SBKRecordFace.swift */,
9580130E2489F1EA00846F8A /* SBKRecordFace.xib */,
);
path = SBKRecordFace;
sourceTree = "<group>";
};
95801345248A237000846F8A /* ModelDataHandler */ = {
isa = PBXGroup;
children = (
95801346248A237000846F8A /* SBKModelDataHandler.swift */,
);
path = ModelDataHandler;
sourceTree = "<group>";
};
9580134A248A25E700846F8A /* Model */ = {
isa = PBXGroup;
children = (
95FAB20A24986B9600CE7913 /* valid_idcard.tflite */,
95182D0524B3343D00405EA9 /* liveness.tflite */,
);
path = Model;
sourceTree = "<group>";
};
BF236FF2605D4B46583CACB8 /* Frameworks */ = {
isa = PBXGroup;
children = (
B6D65EE1B3D4F09B622C686E /* Pods_OCR_SDK.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
955105722477746A0053036F /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
9551057C2477746A0053036F /* OCR_SDK.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */
955105762477746A0053036F /* OCR-SDK */ = {
isa = PBXNativeTarget;
buildConfigurationList = 9551057F2477746A0053036F /* Build configuration list for PBXNativeTarget "OCR-SDK" */;
buildPhases = (
CF7838469583816942054598 /* [CP] Check Pods Manifest.lock */,
955105722477746A0053036F /* Headers */,
955105732477746A0053036F /* Sources */,
955105742477746A0053036F /* Frameworks */,
955105752477746A0053036F /* Resources */,
958D36C324C18BB1004B27EB /* Embed Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = "OCR-SDK";
productName = "OCR-SDK";
productReference = 955105772477746A0053036F /* SB_KYC_SDK.framework */;
productType = "com.apple.product-type.framework";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
9551056E2477746A0053036F /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1140;
ORGANIZATIONNAME = itsol;
TargetAttributes = {
955105762477746A0053036F = {
CreatedOnToolsVersion = 11.4.1;
};
};
};
buildConfigurationList = 955105712477746A0053036F /* Build configuration list for PBXProject "OCR-SDK" */;
compatibilityVersion = "Xcode 9.3";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
vi,
Base,
);
mainGroup = 9551056D2477746A0053036F;
productRefGroup = 955105782477746A0053036F /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
955105762477746A0053036F /* OCR-SDK */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
955105752477746A0053036F /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
95FAF56E24EA83C900C161F2 /* Place within the box.png in Resources */,
95FAF56F24EA83C900C161F2 /* Avoid glare.png in Resources */,
95FAF57024EA83C900C161F2 /* Do not place outside.png in Resources */,
95FAF52024EA3FEE00C161F2 /* Button_Do@2x.png in Resources */,
95FAF51E24EA3FE300C161F2 /* Caution@2x.png in Resources */,
95A6BA6224E1627800A34ADD /* back_light.png in Resources */,
956BB5BB24DD31F7000C88D2 /* Scan-1@2x.png in Resources */,
956BB5BC24DD31F7000C88D2 /* Scan-3@2x.png in Resources */,
956BB5BD24DD31F7000C88D2 /* Scan-4@2x.png in Resources */,
956BB5BE24DD31F7000C88D2 /* Scan-5@2x.png in Resources */,
956BB5AF24DCFFB2000C88D2 /* Hat@2x.png in Resources */,
956BB5B024DCFFB2000C88D2 /* Glasses@2x.png in Resources */,
956BB5B124DCFFB2000C88D2 /* Brighness@2x.png in Resources */,
956BB5B224DCFFB2000C88D2 /* Holdphone@2x.png in Resources */,
956BB56E24DBB9B7000C88D2 /* Back@2x.png in Resources */,
955E7AE324D967CE0048FC06 /* Passport-2 copy@2x.png in Resources */,
955E7AE124D967BD0048FC06 /* Car-2 copy@2x.png in Resources */,
955E7ADF24D967B20048FC06 /* Card-2@2x.png in Resources */,
955E7AC924D957140048FC06 /* Next@2x.png in Resources */,
957DF5F324C035C700FE6A67 /* objcio.cer in Resources */,
95182D0624B3343E00405EA9 /* liveness.tflite in Resources */,
95FAB2672499C89400CE7913 /* rotate.png in Resources */,
95FAB20B24986B9600CE7913 /* valid_idcard.tflite in Resources */,
955BECE624935A14001FB052 /* ic_record.png in Resources */,
955105FA2477B52C0053036F /* back.png in Resources */,
955105B6247774CC0053036F /* Screen Shot 2020-05-12 at 15.14.44.png in Resources */,
955105AE247774CC0053036F /* TutorialFace1.png in Resources */,
955105B4247774CC0053036F /* background.png in Resources */,
955105BB247774CC0053036F /* cmndF2.png in Resources */,
955105C4247774CC0053036F /* SBKResultFaceVC.xib in Resources */,
955105B5247774CC0053036F /* iconCap.png in Resources */,
955105B9247774CC0053036F /* cmndF1.png in Resources */,
955105B1247774CC0053036F /* TutorialFace2.png in Resources */,
955105B3247774CC0053036F /* TutorialFaceCheckBox.png in Resources */,
955105BC247774CC0053036F /* SBKTutorialFaceVC.xib in Resources */,
955105C5247774CC0053036F /* SBKCaptureFaceVC.xib in Resources */,
955105BF247774CC0053036F /* SBKTutorialVC.xib in Resources */,
9546DDC0247D1FA200AF50DE /* Localizable.strings in Resources */,
958013102489F1EA00846F8A /* SBKRecordFace.xib in Resources */,
955105C1247774CC0053036F /* SBKResultCapture.xib in Resources */,
955105B0247774CC0053036F /* TutorialFace3.png in Resources */,
955105AF247774CC0053036F /* TutorialFaceP.png in Resources */,
9546DDD0247D2C0C00AF50DE /* SBKCaptureCardVC.xib in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
CF7838469583816942054598 /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputFileListPaths = (
);
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-OCR-SDK-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
955105732477746A0053036F /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
955105AD247774CC0053036F /* Loadding.swift in Sources */,
955105CA247775290053036F /* SB_KYC_SDK.swift in Sources */,
955105AC247774CC0053036F /* ExtUiViewController.swift in Sources */,
955BEC4C249083A1001FB052 /* SBValidateInput.swift in Sources */,
955105C6247774CC0053036F /* SBKCaptureFaceVC.swift in Sources */,
955105C8247774CC0053036F /* SBKCaptureCardVC.swift in Sources */,
95801349248A25BC00846F8A /* CVPixelBufferExtension.swift in Sources */,
955105C3247774CC0053036F /* SBKResultFaceVC.swift in Sources */,
955105AB247774CC0053036F /* SBOCRRequest.swift in Sources */,
955105BE247774CC0053036F /* SBKTutorialVC.swift in Sources */,
9546DDDC247E197800AF50DE /* Global.swift in Sources */,
9580130F2489F1EA00846F8A /* SBKRecordFace.swift in Sources */,
95801347248A237000846F8A /* SBKModelDataHandler.swift in Sources */,
955105BD247774CC0053036F /* SBKTutorialFaceVC.swift in Sources */,
955105C2247774CC0053036F /* SBKResultCapture.swift in Sources */,
9546DDB5247D171500AF50DE /* ExtString.swift in Sources */,
955BEC4E249098C2001FB052 /* ExtUIColor.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
9546DDC2247D1FA200AF50DE /* Localizable.strings */ = {
isa = PBXVariantGroup;
children = (
9546DDC1247D1FA200AF50DE /* en */,
9546DDC3247D1FAA00AF50DE /* vi */,
);
name = Localizable.strings;
sourceTree = "<group>";
};
9546DDD2247D2C0C00AF50DE /* SBKCaptureCardVC.xib */ = {
isa = PBXVariantGroup;
children = (
9546DDD1247D2C0C00AF50DE /* Base */,
9546DDD4247D2C1700AF50DE /* vi */,
9546DDD6247D2C1A00AF50DE /* en */,
);
name = SBKCaptureCardVC.xib;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
9551057D2477746A0053036F /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Debug;
};
9551057E2477746A0053036F /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
VALIDATE_PRODUCT = YES;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Release;
};
955105802477746A0053036F /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 2A440D461209C526DEA3FD58 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
DEVELOPMENT_TEAM = BFUSF2KZ79;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
INFOPLIST_FILE = "OCR-SDK/Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = "itsol.OCR-SDK";
PRODUCT_NAME = SB_KYC_SDK;
SKIP_INSTALL = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
955105812477746A0053036F /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 8C1C048EB777A910827003CA /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
DEFINES_MODULE = YES;
DEVELOPMENT_TEAM = BFUSF2KZ79;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
INFOPLIST_FILE = "OCR-SDK/Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = "itsol.OCR-SDK";
PRODUCT_NAME = SB_KYC_SDK;
SKIP_INSTALL = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
955105712477746A0053036F /* Build configuration list for PBXProject "OCR-SDK" */ = {
isa = XCConfigurationList;
buildConfigurations = (
9551057D2477746A0053036F /* Debug */,
9551057E2477746A0053036F /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
9551057F2477746A0053036F /* Build configuration list for PBXNativeTarget "OCR-SDK" */ = {
isa = XCConfigurationList;
buildConfigurations = (
955105802477746A0053036F /* Debug */,
955105812477746A0053036F /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 9551056E2477746A0053036F /* Project object */;
}
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:OCR-SDK.xcodeproj">
</FileRef>
</Workspace>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1140"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "955105762477746A0053036F"
BuildableName = "SB_KYC_SDK.framework"
BlueprintName = "OCR-SDK"
ReferencedContainer = "container:OCR-SDK.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Release"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "955105762477746A0053036F"
BuildableName = "SB_KYC_SDK.framework"
BlueprintName = "OCR-SDK"
ReferencedContainer = "container:OCR-SDK.xcodeproj">
</BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict>
<key>955105762477746A0053036F</key>
<dict>
<key>primary</key>
<true/>
</dict>
</dict>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:OCR-SDK.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "B08B0591-1A45-4415-9C04-B83659FBFE35"
type = "0"
version = "2.0">
</Bucket>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
</dict>
</plist>
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import CoreImage
import TensorFlowLite
import UIKit
import Accelerate
/// A result from invoking the `Interpreter`.
struct Result {
let inferenceTime: Double
let inferences: [Inference]
}
/// An inference from invoking the `Interpreter`.
struct Inference {
let confidence: Float
let label: String
}
/// Information about a model file or labels file.
typealias FileInfo = (name: String, extension: String)
/// Information about the MobileNet model.
enum MobileNet {
static let modelInfo: FileInfo = (name: "liveness", extension: "tflite")
static let cardModel: FileInfo = (name: "valid_idcard", extension: "tflite")
}
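// Note: these `FileInfo` tuples are resolved against the framework bundle in
// `SBKModelDataHandler.init` below via
// `Bundle(for: SBKRecordFace.self).path(forResource: "liveness", ofType: "tflite")`.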
/// This class handles all data preprocessing and makes calls to run inference on a given frame
/// by invoking the `Interpreter`. For a successful inference it returns the raw per-class
/// scores read from the output tensor.
class SBKModelDataHandler {
// MARK: - Internal Properties
/// The current thread count used by the TensorFlow Lite Interpreter.
let threadCount: Int
let resultCount = 3
let threadCountLimit = 10
// MARK: - Model Parameters
let batchSize = 1
let inputChannels = 3
let inputWidth = 224
let inputHeight = 224
// MARK: - Private Properties
/// List of labels from the given labels file.
private var labels: [String] = []
/// TensorFlow Lite `Interpreter` object for performing inference on a given model.
private var interpreter: Interpreter
/// Information about the alpha component in RGBA data.
private let alphaComponent = (baseOffset: 4, moduloRemainder: 3)
// MARK: - Initialization
/// A failable initializer for `SBKModelDataHandler`. A new instance is created if the model
/// file is successfully loaded from the framework bundle. Default `threadCount` is 1.
init?(modelFileInfo: FileInfo, threadCount: Int = 1) {
let modelFilename = modelFileInfo.name
// Construct the path to the model file.
let bundle = Bundle(for: SBKRecordFace.self)
guard let modelPath = bundle.path(
forResource: modelFilename,
ofType: modelFileInfo.extension
) else {
print("Failed to load the model file with name: \(modelFilename).")
return nil
}
let delegate = MetalDelegate()
// Specify the options for the `Interpreter`.
self.threadCount = threadCount
var options = Interpreter.Options()
options.threadCount = threadCount
do {
// Create the `Interpreter`.
interpreter = try Interpreter(modelPath: modelPath, options: options, delegates: [delegate])
// Allocate memory for the model's input `Tensor`s.
try interpreter.allocateTensors()
} catch let error {
print("Failed to create the interpreter with error: \(error.localizedDescription)")
return nil
}
}
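// Usage sketch (illustrative; `pixelBuffer` is a camera frame supplied by the caller):
//   guard let handler = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo, threadCount: 2) else { return }
//   let scores = handler.runModel(onFrame: pixelBuffer)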
/// Computes the average color of `image` by averaging the RGB components of every pixel.
/// `datas` is expected to be the raw RGBA byte buffer backing `imagesss` (see `getPixelColor`).
func fromImage(image: UIImage, datas: Data, imagesss: UIImage) -> UIColor {
var totalR: CGFloat = 0
var totalG: CGFloat = 0
var totalB: CGFloat = 0
var count: CGFloat = 0
for x in 0..<Int(image.size.width) {
for y in 0..<Int(image.size.height) {
count += 1
var rF: CGFloat = 0, gF: CGFloat = 0, bF: CGFloat = 0, aF: CGFloat = 0
image.getPixelColor(pos: CGPoint(x: x, y: y), dataImage: datas, image: imagesss).getRed(&rF, green: &gF, blue: &bF, alpha: &aF)
totalR += rF
totalG += gF
totalB += bF
}
}
// Average each channel over the total pixel count.
return UIColor(red: totalR / count, green: totalG / count, blue: totalB / count, alpha: 1.0)
}
/// Renders a `CIImage` into a `UIImage` through a `CIContext`.
func convert(cmage: CIImage) -> UIImage {
let context = CIContext(options: nil)
let cgImage = context.createCGImage(cmage, from: cmage.extent)!
return UIImage(cgImage: cgImage)
}
// MARK: - Internal Methods
/// Performs image preprocessing, invokes the `Interpreter`, and processes the inference results.
func runModel(onFrame pixelBuffer: CVPixelBuffer) -> [Float]? {
let sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer)
assert(sourcePixelFormat == kCVPixelFormatType_32ARGB ||
sourcePixelFormat == kCVPixelFormatType_32BGRA ||
sourcePixelFormat == kCVPixelFormatType_32RGBA)
let imageChannels = 4
assert(imageChannels >= inputChannels)
// Crops the image to the biggest square in the center and scales it down to model dimensions.
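// For example, a 1080x1920 frame is center-cropped to its central 1080x1080 square, then
// scaled down to 224x224 (`inputWidth` x `inputHeight`).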
let scaledSize = CGSize(width: inputWidth, height: inputHeight)
guard let thumbnailPixelBuffer = pixelBuffer.centerThumbnail(ofSize: scaledSize) else {
return nil
}
let interval: TimeInterval
let outputTensor: Tensor
do {
let inputTensor = try interpreter.input(at: 0)
// Remove the alpha component from the image buffer to get the RGB data.
guard let rgbData = rgbDataFromBuffer(
thumbnailPixelBuffer,
byteCount: batchSize * inputWidth * inputHeight * inputChannels,
isModelQuantized: inputTensor.dataType == .uInt8
) else {
print("Failed to convert the image buffer to RGB data.")
return nil
}
// Debug helper left disabled: `rgbData` holds raw RGB bytes, not encoded image data,
// so `UIImage(data:)` would return nil here.
// let imageCap = UIImage(data: rgbData)
// self.fromImage(image: imageCap!, datas: rgbData, imagesss: imageCap!)
// Copy the RGB data to the input `Tensor`.
try interpreter.copy(rgbData, toInputAt: 0)
// Run inference by invoking the `Interpreter`.
let startDate = Date()
try interpreter.invoke()
interval = Date().timeIntervalSince(startDate) * 1000
// Get the output `Tensor` to process the inference results.
outputTensor = try interpreter.output(at: 0)
} catch let error {
print("Failed to invoke the interpreter with error: \(error.localizedDescription)")
return nil
}
let results: [Float]
switch outputTensor.dataType {
case .uInt8:
guard let quantization = outputTensor.quantizationParameters else {
print("No results returned because the quantization values for the output tensor are nil.")
return nil
}
let quantizedResults = [UInt8](outputTensor.data)
results = quantizedResults.map {
quantization.scale * Float(Int($0) - quantization.zeroPoint)
}
case .float32:
results = [Float32](unsafeData: outputTensor.data) ?? []
default:
print("Output tensor data type \(outputTensor.dataType) is unsupported for this example app.")
return nil
}
return results
}
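// Sketch of consuming the result (illustrative; `handler` is an `SBKModelDataHandler`
// instance and a classification-style output is assumed):
//   if let scores = handler.runModel(onFrame: pixelBuffer) {
//       let best = scores.enumerated().max { $0.element < $1.element }
//       print("top class: \(best?.offset ?? -1), score: \(best?.element ?? 0)")
//   }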
/// Returns the RGB data representation of the given image buffer with the specified `byteCount`.
///
/// - Parameters
/// - buffer: The pixel buffer to convert to RGB data.
/// - byteCount: The expected byte count for the RGB data calculated using the values that the
/// model was trained on: `batchSize * imageWidth * imageHeight * componentsCount`.
/// - isModelQuantized: Whether the model is quantized (i.e. fixed point values rather than
/// floating point values).
/// - Returns: The RGB data representation of the image buffer or `nil` if the buffer could not be
/// converted.
private func rgbDataFromBuffer(
_ buffer: CVPixelBuffer,
byteCount: Int,
isModelQuantized: Bool
) -> Data? {
CVPixelBufferLockBaseAddress(buffer, .readOnly)
defer {
CVPixelBufferUnlockBaseAddress(buffer, .readOnly)
}
guard let sourceData = CVPixelBufferGetBaseAddress(buffer) else {
return nil
}
let width = CVPixelBufferGetWidth(buffer)
let height = CVPixelBufferGetHeight(buffer)
let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
let destinationChannelCount = 3
let destinationBytesPerRow = destinationChannelCount * width
var sourceBuffer = vImage_Buffer(data: sourceData,
height: vImagePixelCount(height),
width: vImagePixelCount(width),
rowBytes: sourceBytesPerRow)
guard let destinationData = malloc(height * destinationBytesPerRow) else {
print("Error: out of memory")
return nil
}
defer {
free(destinationData)
}
var destinationBuffer = vImage_Buffer(data: destinationData,
height: vImagePixelCount(height),
width: vImagePixelCount(width),
rowBytes: destinationBytesPerRow)
let pixelBufferFormat = CVPixelBufferGetPixelFormatType(buffer)
switch (pixelBufferFormat) {
case kCVPixelFormatType_32BGRA:
vImageConvert_BGRA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
case kCVPixelFormatType_32ARGB:
vImageConvert_ARGB8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
case kCVPixelFormatType_32RGBA:
vImageConvert_RGBA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
default:
// Unknown pixel format.
return nil
}
let byteData = Data(bytes: destinationBuffer.data, count: destinationBuffer.rowBytes * height)
if isModelQuantized {
return byteData
}
// Not quantized: normalize each byte to a Float in [0, 1].
let bytes = Array<UInt8>(unsafeData: byteData)!
let floats = bytes.map { Float($0) / 255.0 }
return Data(copyingBufferOf: floats)
}
}
// MARK: - Extensions
extension Data {
/// Creates a new buffer by copying the buffer pointer of the given array.
///
/// - Warning: The given array's element type `T` must be trivial in that it can be copied bit
/// for bit with no indirection or reference-counting operations; otherwise, reinterpreting
/// data from the resulting buffer has undefined behavior.
/// - Parameter array: An array with elements of type `T`.
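/// - Example (illustrative): `Data(copyingBufferOf: [Float](repeating: 0.5, count: 4))`
///   yields 16 bytes, the bit pattern of four `Float`s.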
init<T>(copyingBufferOf array: [T]) {
self = array.withUnsafeBufferPointer(Data.init)
}
}
extension Array {
/// Creates a new array from the bytes of the given unsafe data.
///
/// - Warning: The array's `Element` type must be trivial in that it can be copied bit for bit
/// with no indirection or reference-counting operations; otherwise, copying the raw bytes in
/// the `unsafeData`'s buffer to a new array returns an unsafe copy.
/// - Note: Returns `nil` if `unsafeData.count` is not a multiple of
/// `MemoryLayout<Element>.stride`.
/// - Parameter unsafeData: The data containing the bytes to turn into an array.
init?(unsafeData: Data) {
guard unsafeData.count % MemoryLayout<Element>.stride == 0 else { return nil }
#if swift(>=5.0)
self = unsafeData.withUnsafeBytes { .init($0.bindMemory(to: Element.self)) }
#else
self = unsafeData.withUnsafeBytes {
.init(UnsafeBufferPointer<Element>(
start: $0,
count: unsafeData.count / MemoryLayout<Element>.stride
))
}
#endif // swift(>=5.0)
}
}
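// A quick round-trip check of the two extensions above (an illustrative helper,
// not part of the SDK): copy a [Float32] into Data, then decode it back.
private func dataArrayRoundTripExample() -> Bool {
let source: [Float32] = [0.25, 0.5, 0.75]
let encoded = Data(copyingBufferOf: source) // 12 bytes: 3 x 4-byte floats
let decoded = [Float32](unsafeData: encoded) ?? [] // nil only when the byte count is misaligned
return decoded == source
}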
extension UIImage {
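/// Reads the RGBA components at `pos` from `dataImage`, assuming 4 bytes per
/// pixel and a row stride equal to the image width in points.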
func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
let r = CGFloat(dataImage[pixelInfo]) / CGFloat(255.0)
let g = CGFloat(dataImage[pixelInfo+1]) / CGFloat(255.0)
let b = CGFloat(dataImage[pixelInfo+2]) / CGFloat(255.0)
let a = CGFloat(dataImage[pixelInfo+3]) / CGFloat(255.0)
return UIColor(red: r, green: g, blue: b, alpha: a)
}
}
//
// OCR_SDK.h
// OCR-SDK
//
// Created by itsol on 5/22/20.
// Copyright © 2020 itsol. All rights reserved.
//
#import <Foundation/Foundation.h>
//! Project version number for OCR_SDK.
FOUNDATION_EXPORT double OCR_SDKVersionNumber;
//! Project version string for OCR_SDK.
FOUNDATION_EXPORT const unsigned char OCR_SDKVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <OCR_SDK/PublicHeader.h>
//
// OCRSDK.swift
// OCR-SDK
//
// Created by itsol on 5/18/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
public class SB_KYC_SDK {
public static let shared = SB_KYC_SDK()
public typealias CompletionHandle = (_ data: [String: Any]?) -> Void
public typealias CompletionHandleFace = (_ data: Bool) -> Void
let viewController = SBKTutorialVC(URLToken: "http://sdk.sb.gotai.ml")
//Initialize the framework
//input: viewCurrent, sdkConfig
public func initSDK(viewCurrent: UIViewController, sdkConfig: [String:Any], completion: @escaping CompletionHandle) {
DispatchQueue.main.async {
Global.language = sdkConfig["language"] as? String ?? Global.language
Global.url_font = sdkConfig["url_font"] as? String ?? Global.url_font
Global.url_back = sdkConfig["url_back"] as? String ?? Global.url_back
Global.url_face = sdkConfig["url_face"] as? String ?? Global.url_face
Global.header = sdkConfig["header"] as? [String: Any] ?? Global.header
Global.typeFace = sdkConfig["type_face"] as? String ?? Global.typeFace
Global.colorConfig = sdkConfig["color"] as? String ?? Global.colorConfig
Global.frontConfig = sdkConfig["front"] as? String ?? Global.frontConfig
Global.copyright = sdkConfig["copyright"] as? String ?? Global.copyright
Global.step = sdkConfig["step"] as? String ?? Global.step
Global.colorButtonNext = sdkConfig["colorBackgroundButtonNext"] as? String ?? Global.colorButtonNext
Global.colorButtonBack = sdkConfig["colorBackgroundButtonBack"] as? String ?? Global.colorButtonBack
Global.colorTextButtonNext = sdkConfig["colorTextButtonNext"] as? String ?? Global.colorTextButtonNext
Global.colorTextButtonBack = sdkConfig["colorTextButtonBack"] as? String ?? Global.colorTextButtonBack
Global.colorTextPreview = sdkConfig["colorTextPreview"] as? String ?? Global.colorTextPreview
let ratio = sdkConfig["ratioPass"] as? Int ?? 70
Global.ratioPass = Float(ratio)
Global.useFileCer = sdkConfig["useFileCer"] as? Bool ?? Global.useFileCer // "useFileCer" key name assumed from the Global property
self.viewController.completionSuccessTutorial = { [weak self] data in
guard self != nil else { return }
completion(data)
}
viewCurrent.navigationController?.pushViewController(self.viewController, animated: true)
viewCurrent.navigationController?.setNavigationBarHidden(false, animated: false)
}
}
//Validate card
//input: CVPixelBuffer
//output: Int, 0: fake image, 1: front side, 2: back side, -1: error
public func eKycCardValid(image: CVPixelBuffer) -> Int {
let resultValidate = SBKValidateInput.shared.didOutput(pixelBuffer: image)
switch resultValidate {
case .ERROR:
return -1
case .IMAGEFAKE:
return 0
case .IMAGEFRONT:
return 1
case .IMAGEBACK:
return 2
}
}
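// Illustrative use from a camera frame callback (hypothetical call site):
// let side = SB_KYC_SDK.shared.eKycCardValid(image: pixelBuffer)
// // side == 1: front captured, 2: back captured, 0: fake, -1: error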
//Validate face
//input: CVPixelBuffer
public func eKycFaceValid(image: CVPixelBuffer, completion: @escaping CompletionHandleFace){
SBKValidateInput.shared.validateFace(imageFace: image) {[weak self] data in
guard self != nil else { return }
completion(data)
}
}
//Validate the face, then call the face-matching API
//input: CVPixelBuffer (face, card front, card back)
public func eKycFaceMatch(imageFace: CVPixelBuffer, imageCardFront: CVPixelBuffer, imageCardBack: CVPixelBuffer, completion: @escaping CompletionHandle) {
SBKValidateInput.shared.validateFace(imageFace: imageFace) {[weak self] data in
guard self != nil else { return }
if data {
Global.imageCard1 = SBKValidateInput.shared.convertCVPixelToUIImage(pixelBuffer: imageCardFront).pngData()
Global.imageCard2 = SBKValidateInput.shared.convertCVPixelToUIImage(pixelBuffer: imageCardBack).pngData()
let imageFaceUI = SBKValidateInput.shared.convertCVPixelToUIImage(pixelBuffer: imageFace).pngData()
SBOCRRequest.shared.processFace(image: imageFaceUI!, pathURL: Global.url_face, idBack: "", idFront: "") { (errorCode, data) -> Void in
guard errorCode == nil else {
let error: [String: Any] = ["error": errorCode!]
completion(error)
return
}
completion(data)
}
} else {
let error: [String: Any] = ["error": "image faild"]
completion(error)
}
}
}
}
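// Illustrative host-app call site for the entry points above. The config keys
// mirror those read in initSDK; the values are placeholders, and the host view
// controller is assumed to be embedded in a UINavigationController.
final class HostViewControllerExample: UIViewController {
func startEKYC() {
let config: [String: Any] = [
"language": "vi",
"copyright": "Your Company",
"ratioPass": 70
]
SB_KYC_SDK.shared.initSDK(viewCurrent: self, sdkConfig: config) { data in
print("eKYC result: \(String(describing: data))")
}
}
}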
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina4_0" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKCaptureCardVC" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="btnCapture" destination="iFg-0d-HQS" id="r0w-Wm-TmG"/>
<outlet property="btnRotateScreen" destination="5Kv-HH-7wN" id="hTf-TX-opc"/>
<outlet property="imgCaution" destination="1Xg-nG-fUa" id="zDm-IE-Bd3"/>
<outlet property="lbCopyright" destination="2ya-Wd-kWv" id="tY2-Sz-HDv"/>
<outlet property="lbDescription" destination="xgg-bw-az4" id="jcv-Le-c8C"/>
<outlet property="lbStep" destination="hkr-WE-tyf" id="Unf-6k-abU"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
<outlet property="viewCamera" destination="f35-hN-bYH" id="dPK-9z-PIg"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="wWe-FJ-IbP">
<rect key="frame" x="0.0" y="0.0" width="320" height="44"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="8Qq-hu-j34">
<rect key="frame" x="5" y="9" width="40" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="Ql8-m6-h1b"/>
<constraint firstAttribute="width" constant="40" id="hRw-3Y-0vy"/>
</constraints>
<inset key="imageEdgeInsets" minX="15" minY="5" maxX="10" maxY="5"/>
<state key="normal" image="Back.png"/>
<connections>
<action selector="onSelectBack:" destination="-1" eventType="touchUpInside" id="7KI-Xl-3iL"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Scan ID card" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="mDW-6Q-7Q8">
<rect key="frame" x="111.5" y="13.5" width="97.5" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.88784521819999995" green="0.96387571100000002" blue="0.98960381750000004" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="bottom" secondItem="8Qq-hu-j34" secondAttribute="bottom" constant="5" id="CF1-Xd-vjJ"/>
<constraint firstItem="8Qq-hu-j34" firstAttribute="leading" secondItem="wWe-FJ-IbP" secondAttribute="leading" constant="5" id="EqL-bz-2qU"/>
<constraint firstAttribute="height" constant="44" id="NEQ-Ul-wxS"/>
<constraint firstItem="mDW-6Q-7Q8" firstAttribute="centerX" secondItem="wWe-FJ-IbP" secondAttribute="centerX" id="j9V-QH-cVY"/>
<constraint firstItem="mDW-6Q-7Q8" firstAttribute="centerY" secondItem="8Qq-hu-j34" secondAttribute="centerY" id="lNT-pw-THw"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="f35-hN-bYH">
<rect key="frame" x="0.0" y="44" width="320" height="524"/>
<color key="backgroundColor" white="0.66666666666666663" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<label hidden="YES" opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Use front" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="2ya-Wd-kWv">
<rect key="frame" x="124.5" y="273" width="71" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<imageView hidden="YES" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Caution.png" translatesAutoresizingMaskIntoConstraints="NO" id="1Xg-nG-fUa">
<rect key="frame" x="100" y="335" width="120" height="32"/>
<constraints>
<constraint firstAttribute="width" constant="120" id="SPW-oE-ogJ"/>
<constraint firstAttribute="height" constant="32" id="Vu7-eU-bSy"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="place your" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="xgg-bw-az4">
<rect key="frame" x="120.5" y="387" width="79.5" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="s0F-Op-Skc">
<rect key="frame" x="126.5" y="437.5" width="67" height="20.5"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 1/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="hkr-WE-tyf">
<rect key="frame" x="10" y="3" width="47" height="14.5"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.54183894399999999" green="0.5419179797" blue="0.54181402919999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="bottom" secondItem="hkr-WE-tyf" secondAttribute="bottom" constant="3" id="23F-mj-6Ie"/>
<constraint firstItem="hkr-WE-tyf" firstAttribute="leading" secondItem="s0F-Op-Skc" secondAttribute="leading" constant="10" id="Gax-23-RvT"/>
<constraint firstItem="hkr-WE-tyf" firstAttribute="top" secondItem="s0F-Op-Skc" secondAttribute="top" constant="3" id="Iqf-8t-1oN"/>
<constraint firstAttribute="trailing" secondItem="hkr-WE-tyf" secondAttribute="trailing" constant="10" id="j7v-pc-4ue"/>
</constraints>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="iFg-0d-HQS">
<rect key="frame" x="135" y="488" width="50" height="50"/>
<constraints>
<constraint firstAttribute="width" constant="50" id="1p8-eu-8ja"/>
<constraint firstAttribute="height" constant="50" id="U3i-fV-Hr4"/>
</constraints>
<state key="normal" image="iconCap.png"/>
<connections>
<action selector="onCapturePhoto:" destination="-1" eventType="touchUpInside" id="VyC-7u-wqF"/>
</connections>
</button>
<button hidden="YES" opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="5Kv-HH-7wN">
<rect key="frame" x="270" y="493" width="40" height="40"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstAttribute="height" constant="40" id="V8P-ut-anf"/>
<constraint firstAttribute="width" constant="40" id="wTy-Wh-mya"/>
</constraints>
<state key="normal" title="Button" image="rotate.png"/>
<connections>
<action selector="onSelectRotateScreen:" destination="-1" eventType="touchUpInside" id="vLx-q9-Irs"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" red="0.88784521819999995" green="0.96387571100000002" blue="0.98960381750000004" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="iFg-0d-HQS" firstAttribute="top" secondItem="s0F-Op-Skc" secondAttribute="bottom" constant="30" id="0Hf-DN-0E8"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="iFg-0d-HQS" secondAttribute="bottom" constant="30" id="0wa-WH-Zg2"/>
<constraint firstItem="1Xg-nG-fUa" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="84X-BO-Ptu"/>
<constraint firstAttribute="bottom" secondItem="f35-hN-bYH" secondAttribute="bottom" id="Eh6-Sz-Pbu"/>
<constraint firstItem="xgg-bw-az4" firstAttribute="top" secondItem="1Xg-nG-fUa" secondAttribute="bottom" constant="20" id="HcO-1y-XqV"/>
<constraint firstItem="f35-hN-bYH" firstAttribute="top" secondItem="wWe-FJ-IbP" secondAttribute="bottom" id="Ixh-GG-Lwd"/>
<constraint firstItem="wWe-FJ-IbP" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" id="LVc-ZE-pTI"/>
<constraint firstItem="xgg-bw-az4" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="NpR-uC-8VJ"/>
<constraint firstItem="f35-hN-bYH" firstAttribute="leading" secondItem="i5M-Pr-FkT" secondAttribute="leading" id="Qex-m2-Ka2"/>
<constraint firstAttribute="trailing" secondItem="f35-hN-bYH" secondAttribute="trailing" id="Rvr-U9-Mm7"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="wWe-FJ-IbP" secondAttribute="trailing" id="SZb-Ei-TtM"/>
<constraint firstItem="s0F-Op-Skc" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="Tlp-LO-cf3"/>
<constraint firstItem="2ya-Wd-kWv" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="VJC-7O-ftY"/>
<constraint firstItem="f35-hN-bYH" firstAttribute="top" secondItem="2ya-Wd-kWv" secondAttribute="bottom" constant="-249.5" id="jMM-Ki-02G"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="5Kv-HH-7wN" secondAttribute="trailing" constant="10" id="lpT-8c-BLV"/>
<constraint firstItem="iFg-0d-HQS" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="nMf-Pk-jZq"/>
<constraint firstItem="s0F-Op-Skc" firstAttribute="top" secondItem="xgg-bw-az4" secondAttribute="bottom" constant="30" id="tix-0k-uRo"/>
<constraint firstItem="5Kv-HH-7wN" firstAttribute="centerY" secondItem="iFg-0d-HQS" secondAttribute="centerY" id="wBh-u4-atm"/>
<constraint firstItem="wWe-FJ-IbP" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" id="zVg-x6-hYK"/>
</constraints>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="133" y="154"/>
</view>
</objects>
<resources>
<image name="Back.png" width="31.5" height="53.5"/>
<image name="Caution.png" width="84.5" height="84.5"/>
<image name="iconCap.png" width="172" height="172"/>
<image name="rotate.png" width="200" height="200"/>
</resources>
</document>
//
// CaptureCardVC.swift
//
// Created by itsol on 5/11/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
import AVFoundation
class SBKCaptureCardVC: UIViewController, AVCapturePhotoCaptureDelegate {
@IBOutlet public weak var viewCamera: UIView!
@IBOutlet public weak var lbDescription: UILabel!
@IBOutlet weak var lbCopyright: UILabel!
@IBOutlet weak var btnRotateScreen: UIButton!
@IBOutlet weak var lbStep: UILabel!
@IBOutlet weak var btnCapture: UIButton!
@IBOutlet weak var imgCaution: UIImageView!
var captureSession: AVCaptureSession = AVCaptureSession()
var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
let videoDataOutput = AVCaptureVideoDataOutput()
var descriptionScreen: String = "Front of your personal card".localized()
var checkScreen: Int = 1 // checkScreen = 1: front card screen, checkScreen = 2: back card screen
var idFront: String = ""
var URLToken: String = ""
var statusTakePhoto: Bool = true
var statusValidateImage: ValidateCard = .ERROR
var statusScreen: String = "vertical" //horizontal
let validateInput = SBKValidateInput.shared
var completionSuccessCard: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
self.btnRotateScreen.layer.cornerRadius = 20
}
init() {
super.init(nibName: "SBKCaptureCardVC", bundle: Bundle(for: SBKCaptureCardVC.self))
}
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func loadConfigUI() {
self.lbStep.text = String(format: "Step %d/3", checkScreen)
self.lbCopyright.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
if Global.frontConfig == "" {
self.lbCopyright.font = UIFont(name: Global.frontConfig, size: 17)
self.lbDescription.font = UIFont(name: Global.frontConfig, size: 17)
}
let label = UILabel()
label.frame = CGRect(x: self.view.center.x - 50, y: self.viewCamera.frame.size.height / 8 + self.viewCamera.frame.origin.y - 40, width: 100, height: 20)
label.textAlignment = .center
label.text = self.checkScreen == 1 ? "Use front" : "Use back"
label.textColor = UIColor.white
self.view.addSubview(label)
}
//Set up the camera
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.loadConfigUI()
self.lbDescription.text = self.descriptionScreen
captureSession.sessionPreset = .high
guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
else {
print("Unable to access back camera!")
return
}
do {
let input = try AVCaptureDeviceInput(device: backCamera)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewCamera.bounds
}
}
//set frame preview
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.connection?.videoOrientation = .portrait
viewCamera.layer.addSublayer(videoPreviewLayer)
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
if let viewWithTag = self.viewCamera.viewWithTag(22) {
viewWithTag.removeFromSuperview()
}
let overlay = createOverlay(frame: view.frame, xOffset: view.frame.midX, yOffset: view.frame.midY, radius: 50.0)
overlay.tag = 11
viewCamera.addSubview(overlay)
}
//Set the card placement area
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat, colorBorder: CGColor = UIColor.clear.cgColor) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
let fWidth = self.viewCamera.frame.size.width
let fHeight = self.viewCamera.frame.size.height
let leftSpace = fWidth/2 - (fHeight * 8 / 10 / 2 * 3 / 4)
if self.statusScreen == "horizontal" {
path.addRoundedRect(in: CGRect(x: leftSpace, y: self.viewCamera.frame.origin.y,
width: fHeight * 8 / 10 * 3 / 4, height: fHeight * 8 / 10 ),
cornerWidth: 20, cornerHeight: 20)
} else {
path.addRoundedRect(in: CGRect(x: fWidth/20, y: fHeight / 8,
width: fWidth * 18 / 20, height: fWidth * 18 * 3 / 20 / 4 ),
cornerWidth: 20, cornerHeight: 20)
}
let borderLayer = CAShapeLayer()
borderLayer.path = path // Reuse the Bezier path
borderLayer.fillColor = UIColor.clear.cgColor
borderLayer.strokeColor = colorBorder
borderLayer.lineWidth = 5
borderLayer.frame = overlayView.bounds
overlayView.layer.addSublayer(borderLayer)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
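//Note on the mask above: the card window path is added first and the full
//overlay rect second, so with the even-odd fill rule the window (covered twice)
//is punched out of the mask while the surrounding area (covered once) stays dimmed.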
//Process the photo data after capture
@available(iOS 11.0, *)
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
self.captureSession.stopRunning()
self.captureSession.removeOutput(self.videoDataOutput)
self.captureSession.removeOutput(self.stillImageOutput)
let viewController = SBKResultCapture()
viewController.imageData = imageData
viewController.checkScreen = self.checkScreen
viewController.idFront = self.idFront
viewController.URLToken = self.URLToken
viewController.statusScreen = self.statusScreen
viewController.completionSuccessResultCard = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessCard(data)
}
self.navigationController?.pushViewController(viewController, animated: true)
}
//Capture photo action
@IBAction func onCapturePhoto(_ sender: Any) {
if (self.statusValidateImage == ValidateCard.IMAGEFRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGEBACK && self.checkScreen == 2) {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
} else {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
}
}
func resizeImageCard(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
var imageOutput: UIImage?
if self.statusScreen == "horizontal" {
imageOutput = validateView.cropImageHorizontal(image: imageInput, rect: CGRect(x: imageInput.size.width * 1 / 10, y: imageInput.size.height * 3 / 20, width: imageInput.size.width * 8 / 10, height: imageInput.size.height * 9 / 10), scale: 1.0)
} else {
imageOutput = validateView.cropImage(image: imageInput, rect: CGRect(x: imageInput.size.width / 20, y: imageInput.size.height / 8, width: imageInput.size.width * 18 / 20, height: imageInput.size.width * 18 / 20 * 3 / 4), scale: 1.0)
}
let ciimage = CIImage(image: imageOutput!)
let tmpcontext = CIContext(options: nil)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
@IBAction func onSelectRotateScreen(_ sender: Any) {
}
@IBAction func onSelectBack(_ sender: Any) {
if checkScreen == 1 {
self.navigationController?.popViewController(animated: true)
} else {
for controller in self.navigationController!.viewControllers as Array {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController!.popToViewController(controller, animated: true)
break
}
}
}
}
}
extension SBKCaptureCardVC: AVCaptureVideoDataOutputSampleBufferDelegate {
private func getCameraFrames() {
self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
func captureOutput( _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
let validateImageCard = validateInput.didOutput(pixelBuffer: self.resizeImageCard(pixelBuffer: imageFrameInput))
DispatchQueue.main.async {
if let viewWithTag = self.viewCamera.viewWithTag(11) {
viewWithTag.removeFromSuperview()
}
self.statusValidateImage = validateImageCard
if validateImageCard == ValidateCard.IMAGEFAKE || validateImageCard == .ERROR {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect card, please check!".localized()
self.imgCaution.isHidden = false
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
if validateImageCard == .IMAGEFRONT && self.checkScreen == 2 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the back of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
if validateImageCard == .IMAGEBACK && self.checkScreen == 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please put the front of the card in".localized()
self.btnCapture.setImage(UIImage(named: "Button_Do@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
self.imgCaution.isHidden = false
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.red.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
if (self.statusValidateImage == ValidateCard.IMAGEFRONT && self.checkScreen == 1) || (self.statusValidateImage == .IMAGEBACK && self.checkScreen == 2) {
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.text = "Are you ready. Let's start!".localized()
self.imgCaution.isHidden = true
self.btnCapture.setImage(UIImage(named: "iconCap", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil), for: .normal)
let overlay = self.createOverlay(frame: self.view.frame, xOffset: self.view.frame.midX, yOffset: self.view.frame.midY, radius: 50.0, colorBorder: UIColor.blue.cgColor)
overlay.tag = 11
self.viewCamera.addSubview(overlay)
}
}
}
}
//
// CaptureFaceVC.swift
// OCR-SDK
//
// Created by itsol on 5/12/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
import AVFoundation
class SBKCaptureFaceVC: UIViewController, AVCapturePhotoCaptureDelegate {
var captureSession: AVCaptureSession = AVCaptureSession()
var stillImageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
let videoDataOutput = AVCaptureVideoDataOutput()
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
var statusTake: Bool = false
@IBOutlet weak var lbDescription: UILabel!
@IBOutlet weak var viewbaackground: UIView!
@IBOutlet weak var lbCopyright: UILabel!
@IBOutlet weak var viewOval: UIView!
var completionSuccessFace: ([String:Any])->Void = {_ in}
// var validateFace: (Bool) -> Void = {_ in}
// var checkFace: (Bool) -> Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
var image = UIImage(named: "back", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
image = image?.withRenderingMode(.alwaysOriginal)
let button = UIButton(type: .system)
button.setImage(image, for: .normal)
button.setTitle("Back".localized(), for: .normal)
button.sizeToFit()
button.imageEdgeInsets.left = -5
button.setTitleColor(UIColor.black, for: .normal)
button.addTarget(self, action: #selector(self.back), for: .touchUpInside)
let newBackButton = UIBarButtonItem(customView: button)
self.navigationItem.leftBarButtonItem = newBackButton
}
//Handle the back button
@objc func back(sender: UIBarButtonItem) {
for controller in self.navigationController!.viewControllers as Array {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController!.popToViewController(controller, animated: true)
break
}
}
}
func loadConfigUI() {
self.lbCopyright.text = Global.copyright
self.lbCopyright.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
if Global.frontConfig == "" {
self.lbCopyright.font = UIFont(name: Global.frontConfig, size: 17)
self.lbDescription.font = UIFont(name: Global.frontConfig, size: 17)
}
}
//Take the photo
@IBAction func onCapture(_ sender: Any) {
if self.statusTake {
if #available(iOS 11.0, *) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
} else {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
}
}
init() {
super.init(nibName: "SBKCaptureFaceVC", bundle: Bundle(for: SBKCaptureFaceVC.self))
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
//Set up the camera
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.loadConfigUI()
self.lbDescription.text = "Position your face in the oval".localized()
captureSession.sessionPreset = .medium
guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)
else {
print("Unable to access front camera!")
return
}
do {
let input = try AVCaptureDeviceInput(device: frontCamera)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.viewbaackground.bounds
}
}
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.connection?.videoOrientation = .portrait
viewbaackground.layer.addSublayer(videoPreviewLayer)
let overlayOval = createOverlayOval(frame: viewbaackground.frame,
xOffset: viewbaackground.frame.midX,
yOffset: viewbaackground.frame.midY,
radius: 50.0)
self.viewbaackground.addSubview(overlayOval)
self.createOverlay(frame: viewOval.frame,
xOffset: viewOval.frame.midX,
yOffset: viewOval.frame.midY,
radius: 50.0)
}
func createOverlayOval(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
path.addRoundedRect(in: CGRect(x: self.viewOval.frame.origin.x, y: self.viewOval.frame.origin.y,
width: self.viewOval.frame.width, height: self.viewOval.frame.height ),
cornerWidth: 0, cornerHeight: 0)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
viewOval.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let pathss: CGMutablePath = {
let path = CGMutablePath()
path.move(to: CGPoint(x: 0, y: viewOval.frame.height/2))
if viewOval.frame.width < viewOval.frame.height {
path.addLine(to: CGPoint(x: 0, y: 0 + viewOval.frame.width / 2))
}
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: 0), control: CGPoint(x: 0, y: 0))
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width, y: 0 + viewOval.frame.width / 2), control: CGPoint(x: viewOval.frame.width, y: 0))
if viewOval.frame.width < viewOval.frame.height {
path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height / 2))
}
path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height - viewOval.frame.width / 2))
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height), control: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height))
path.addQuadCurve(to: CGPoint(x: 0, y: viewOval.frame.height - viewOval.frame.width / 2), control: CGPoint(x: 0, y: viewOval.frame.height))
path.addLine(to: CGPoint(x: 0, y: viewOval.frame.height / 2))
return path
}()
pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
let maskLayer = CAShapeLayer(layer: viewOval.layer)
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = pathss
maskLayer.fillRule = .evenOdd
viewOval.layer.mask = maskLayer
viewOval.clipsToBounds = true
}
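//Note on the path above: it traces a "stadium" oval, i.e. two half-circle caps
//built from quad curves joined by straight sides when the view is taller than
//it is wide; even-odd masking then dims everything outside that shape.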
//Handle the captured photo
@available(iOS 11.0, *)
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
self.captureSession.stopRunning()
DispatchQueue.main.async {
let viewController = SBKResultFaceVC()
viewController.imageData = imageData
viewController.idFront = self.idFront
viewController.idBack = self.idBack
viewController.URLToken = self.URLToken
viewController.completionSuccessFaceData = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessFace(data)
}
self.navigationController?.pushViewController(viewController, animated: true)
}
}
func resizeImage(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
let imageOutput = validateView.cropImageFace(image: imageInput, rect: CGRect(x: imageInput.size.width / 10, y: imageInput.size.height / 10, width: imageInput.size.width * 8 / 10, height: imageInput.size.height * 8 / 10 ), scale: 1.0)
return SBKValidateInput.shared.comvertUIImageToCVPixel(imageInput: imageOutput!)
}
}
extension SBKCaptureFaceVC: AVCaptureVideoDataOutputSampleBufferDelegate {
private func getCameraFrames() {
self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video), connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageFrameInput = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
SBKValidateInput.shared.validateFace(imageFace: self.resizeImage(pixelBuffer: imageFrameInput)) {[weak self] data in
guard let `self` = self else { return }
if data {
self.statusTake = true
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorConfig)
self.lbDescription.text = "Are you ready. Let's start!".localized()
} else {
self.statusTake = false
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Position your face in the oval".localized()
}
}
}
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKCaptureFaceVC" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="lbCopyright" destination="sQx-YF-9mT" id="GIf-FP-b30"/>
<outlet property="lbDescription" destination="vEo-Bc-5XO" id="SRU-fS-EZI"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
<outlet property="viewOval" destination="Qyv-Ap-TFB" id="r4K-p7-GrS"/>
<outlet property="viewbaackground" destination="dHE-nU-pVB" id="MrX-Ag-s0Q"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="dHE-nU-pVB">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Qyv-Ap-TFB">
<rect key="frame" x="40" y="94.5" width="334" height="672"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="sQx-YF-9mT">
<rect key="frame" x="10" y="64" width="394" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" " textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="vEo-Bc-5XO">
<rect key="frame" x="10" y="771.5" width="394" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="eoo-q0-oXp">
<rect key="frame" x="182" y="802" width="50" height="50"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstAttribute="width" constant="50" id="Ocl-5u-hSa"/>
<constraint firstAttribute="height" constant="50" id="RYC-E4-iS3"/>
</constraints>
<state key="normal" image="iconCap.png"/>
<connections>
<action selector="onCapture:" destination="-1" eventType="touchUpInside" id="Kll-et-YXX"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="dHE-nU-pVB" secondAttribute="trailing" id="C6C-oe-xWb"/>
<constraint firstItem="vEo-Bc-5XO" firstAttribute="top" secondItem="Qyv-Ap-TFB" secondAttribute="bottom" constant="5" id="FIE-96-ylZ"/>
<constraint firstItem="eoo-q0-oXp" firstAttribute="top" secondItem="vEo-Bc-5XO" secondAttribute="bottom" constant="10" id="PPO-Iz-6Om"/>
<constraint firstAttribute="bottom" secondItem="dHE-nU-pVB" secondAttribute="bottom" id="SDS-kl-rae"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="eoo-q0-oXp" secondAttribute="bottom" constant="10" id="UgG-h0-6jb"/>
<constraint firstItem="dHE-nU-pVB" firstAttribute="leading" secondItem="i5M-Pr-FkT" secondAttribute="leading" id="VGi-MV-mn2"/>
<constraint firstItem="vEo-Bc-5XO" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="10" id="Z61-q1-Mcf"/>
<constraint firstItem="sQx-YF-9mT" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="a0C-ae-8Fa"/>
<constraint firstItem="sQx-YF-9mT" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" constant="20" id="a72-fY-IOo"/>
<constraint firstItem="Qyv-Ap-TFB" firstAttribute="top" secondItem="sQx-YF-9mT" secondAttribute="bottom" constant="10" id="gUQ-xb-FQi"/>
<constraint firstItem="eoo-q0-oXp" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="jCw-e4-JLW"/>
<constraint firstItem="sQx-YF-9mT" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="10" id="kR7-Ie-Uwj"/>
<constraint firstItem="Qyv-Ap-TFB" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="nUl-ft-cgQ"/>
<constraint firstItem="dHE-nU-pVB" firstAttribute="top" secondItem="i5M-Pr-FkT" secondAttribute="top" id="rdM-v3-KGL"/>
<constraint firstItem="Qyv-Ap-TFB" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="40" id="vxR-vG-Tjg"/>
<constraint firstItem="vEo-Bc-5XO" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="xRm-qx-ZZJ"/>
</constraints>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="133" y="124"/>
</view>
</objects>
<resources>
<image name="iconCap.png" width="172" height="172"/>
</resources>
</document>
//
// RecordFace.swift
// OCR-SDK
//
// Created by itsol on 6/5/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
import AVFoundation
import Vision
import CoreML
class SBKRecordFace: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
@IBOutlet weak var viewBackground: UIView!
@IBOutlet weak var lbDescription: UILabel!
@IBOutlet weak var viewOval: UIView!
@IBOutlet weak var viewCheckStep1: UIView!
@IBOutlet weak var viewCheckStep2: UIView!
@IBOutlet weak var viewCheckStep3: UIView!
@IBOutlet weak var imageDescription: UIImageView!
private let captureSession = AVCaptureSession()
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
private let videoDataOutput = AVCaptureVideoDataOutput()
private var modelDataHandler: SBKModelDataHandler? =
SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
private var result: [Float]?
private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
private let delayBetweenInferencesMs: Double = 1000
var numberPass = 0
var numberTrue = 0
var numberFalse = 0
var space: Float = 0.0
var dataImageSuccess: [Data] = []
var checkStartRecord: Bool = false
var timer = Timer()
var timeRecord: Int = 0
var checkStatusRecord: Bool = false
var checkStep: Int = 0
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
public var completionSuccessFaceRecord: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.addCameraInput()
self.showCameraFeed()
self.getCameraFrames()
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
}
func createOverlay(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) {
viewOval.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let pathss: CGMutablePath = {
let path = CGMutablePath()
path.move(to: CGPoint(x: 0, y: viewOval.frame.height/2))
if viewOval.frame.width < viewOval.frame.height {
path.addLine(to: CGPoint(x: 0, y: 0 + viewOval.frame.width / 2))
}
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: 0), control: CGPoint(x: 0, y: 0))
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width, y: 0 + viewOval.frame.width / 2), control: CGPoint(x: viewOval.frame.width, y: 0))
if viewOval.frame.width < viewOval.frame.height {
path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height / 2))
}
path.addLine(to: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height - viewOval.frame.width / 2))
path.addQuadCurve(to: CGPoint(x: viewOval.frame.width / 2, y: viewOval.frame.height), control: CGPoint(x: viewOval.frame.width, y: viewOval.frame.height))
path.addQuadCurve(to: CGPoint(x: 0, y: viewOval.frame.height - viewOval.frame.width / 2), control: CGPoint(x: 0, y: viewOval.frame.height))
path.addLine(to: CGPoint(x: 0, y: viewOval.frame.height / 2))
return path
}()
pathss.addRect(CGRect(origin: .zero, size: viewOval.frame.size))
let maskLayer = CAShapeLayer(layer: viewOval.layer)
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = pathss
maskLayer.fillRule = .evenOdd
viewOval.layer.mask = maskLayer
viewOval.clipsToBounds = true
}
func didOutput(pixelBuffer: CVPixelBuffer, statusFace: StatusFace) {
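// Throttle inference: skip frames until at least delayBetweenInferencesMs
// (1000 ms) have elapsed since the previous model run.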
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
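// Assumption, inferred from the comparisons below: result[0] and result[1] are
// the model's fake/live scores, so result[0] < result[1] counts the frame as live.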
if self.checkStatusRecord {
if result![0] < result![1] { self.numberTrue += 1 } else { self.numberFalse += 1 }
self.numberPass += 1
DispatchQueue.main.async {
let ciimage : CIImage = CIImage(cvPixelBuffer: pixelBuffer)
let imageView : UIImage = SBKValidateInput.shared.convertCIToUIImage(cmage: ciimage)
if self.result![0] < self.result![1] {
if statusFace == .STRAIGHTFACE && self.checkStep == 0 {
if self.dataImageSuccess.count == 0 {
self.dataImageSuccess.append(imageView.pngData()!)
}
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = "Exactly".localized()
self.checkStep = 1
self.viewCheckStep1.backgroundColor = UIColor.colorFromHexa("#FBA02E")
} else if statusFace != .STRAIGHTFACE && self.checkStep == 0 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please look straight".localized()
self.viewCheckStep1.backgroundColor = UIColor.colorFromHexa("#FE3500")
} else if statusFace == .TORIGHT && self.checkStep == 1 {
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = "Exactly".localized()
self.checkStep = 2
self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FBA02E")
if self.dataImageSuccess.count == 1 {
self.dataImageSuccess.append(imageView.pngData()!)
}
} else if statusFace != .TORIGHT && self.checkStep == 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please turn to the right".localized()
self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#FE3500")
} else if statusFace == .TOLEFT && self.checkStep == 2 {
self.lbDescription.textColor = UIColor.green
self.lbDescription.text = "Exactly".localized()
self.checkStep = 3
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FBA02E")
if self.dataImageSuccess.count == 2 {
self.dataImageSuccess.append(imageView.pngData()!)
}
} else if statusFace != .TOLEFT && self.checkStep == 2 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Please turn to the left".localized()
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#FE3500")
}
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect face, please check!".localized()
}
}
}
}
init() {
super.init(nibName: "SBKRecordFace", bundle: Bundle(for: SBKRecordFace.self))
guard modelDataHandler != nil else {
fatalError("Model set up failed")
}
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@IBAction func onRecord(_ sender: Any) {
if !self.checkStatusRecord {
self.startTimer()
self.checkStatusRecord = true
self.timeRecord = 0
}
}
@IBAction func onSelectBackTop(_ sender: Any) {
for controller in self.navigationController!.viewControllers as Array {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController!.popToViewController(controller, animated: true)
break
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let frame = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
if #available(iOS 11.0, *) {
self.detectFace(in: self.resizeImageFace(pixelBuffer: frame))
} else {
// Fallback on earlier versions
}
}
private func addCameraInput() {
self.captureSession.sessionPreset = .medium
if #available(iOS 11.1, *) {
guard let device = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera],
mediaType: .video,
position: .front).devices.first else {
fatalError("No back camera device found, please make sure to run SimpleLaneDetection in an iOS device and not a simulator")
}
let cameraInput = try! AVCaptureDeviceInput(device: device)
if captureSession.canAddInput(cameraInput) {
self.captureSession.addInput(cameraInput)
}
}
}
private func showCameraFeed() {
self.previewLayer.videoGravity = .resize
self.previewLayer.frame = self.viewBackground.frame
self.viewBackground.layer.addSublayer(self.previewLayer)
self.lbDescription.text = "Position your face in the oval".localized()
let overlayOval = createOverlayOval(frame: viewBackground.frame, xOffset: viewBackground.frame.midX, yOffset: viewBackground.frame.midY, radius: 50.0)
self.viewBackground.addSubview(overlayOval)
self.createOverlay(frame: viewOval.frame, xOffset: viewOval.frame.midX, yOffset: viewOval.frame.midY, radius: 50.0)
}
func createOverlayOval(frame: CGRect, xOffset: CGFloat, yOffset: CGFloat, radius: CGFloat) -> UIView {
let overlayView = UIView(frame: frame)
overlayView.backgroundColor = UIColor.black.withAlphaComponent(0.6)
let path = CGMutablePath()
path.addRoundedRect(in: CGRect(x: self.viewOval.frame.origin.x, y: self.viewOval.frame.origin.y, width: self.viewOval.frame.width, height: self.viewOval.frame.height ), cornerWidth: 0, cornerHeight: 0)
path.addRect(CGRect(origin: .zero, size: overlayView.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.black.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
overlayView.layer.mask = maskLayer
overlayView.clipsToBounds = true
return overlayView
}
private func getCameraFrames() {
self.videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
self.videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera_frame_processing_queue"))
self.captureSession.addOutput(self.videoDataOutput)
guard let connection = self.videoDataOutput.connection(with: AVMediaType.video),
connection.isVideoOrientationSupported else { return }
connection.videoOrientation = .portrait
}
@available(iOS 11.0, *)
private func detectFace(in image: CVPixelBuffer) {
let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
DispatchQueue.main.async {
if let results = request.results as? [VNFaceObservation] {
if results.count > 0 {
if results.count > 1 {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "There are many faces in the frame".localized()
} else {
let statusString = self.checkFaceRightLeft(landmarks: results[0].landmarks!)
self.startTimer()
if !self.checkStartRecord && !self.checkStatusRecord {
self.checkStartRecord = true
self.lbDescription.textColor = UIColor.white
//self.lbDescription.text = "Are you ready. Let's start!".localized()//"Bạn đã sẵn sàng. Hãy bắt đầu!"
if !self.checkStatusRecord {
self.checkStatusRecord = true
self.timeRecord = 0
self.checkStep = 0
self.viewCheckStep1.backgroundColor = UIColor.colorFromHexa("#333333")
self.viewCheckStep2.backgroundColor = UIColor.colorFromHexa("#333333")
self.viewCheckStep3.backgroundColor = UIColor.colorFromHexa("#333333")
}
}
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: statusString)
}
}
} else {
if !self.checkStatusRecord {
self.checkStartRecord = false
self.lbDescription.textColor = UIColor.white
self.imageDescription.image = UIImage(named: "Scan-1@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.lbDescription.text = "Position your face in the oval".localized()//"Đặt vị trí mặt bạn vào hình"
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Incorrect face, please check!".localized()
DispatchQueue.global().async {
self.didOutput(pixelBuffer: image, statusFace: .ERROR)
}
}
}
}
}
})
if #available(iOS 12.0, *) {
// Force the revision to 2 (68-points) even on iOS 13 or greater
// when VNDetectFaceLandmarksRequestRevision3 is available.
faceDetectionRequest.revision = 2
}
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
try? imageRequestHandler.perform([faceDetectionRequest])
}
func startTimer() {
switch self.checkStep {
case 0:
self.imageDescription.image = UIImage(named: "Scan-3@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = "Please look straight".localized()//"vui lòng nhìn thẳng"
case 1:
self.imageDescription.image = UIImage(named: "Scan-5@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = "Please turn to the right".localized()//"Quay sang phải"
case 2:
self.imageDescription.image = UIImage(named: "Scan-4@2x", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
self.lbDescription.textColor = UIColor.white
self.lbDescription.text = "Please turn to the left".localized()//"Quay sang trái"
case 3:
if self.dataImageSuccess.count > 2 {
DispatchQueue.main.async {
let viewController = SBKResultFaceVC()
viewController.imageData = self.dataImageSuccess[0]
viewController.idFront = self.idFront
viewController.idBack = self.idBack
viewController.URLToken = self.URLToken
viewController.dataImageSuccess = self.dataImageSuccess
viewController.completionSuccessFaceData = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessFaceRecord(data)
}
self.timeRecord = 0
self.checkStep = 0
self.resetRecord()
self.timer.invalidate()
self.captureSession.stopRunning()
self.captureSession.removeOutput(self.videoDataOutput)
self.navigationController?.pushViewController(viewController, animated: true)
}
} else {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Unsatisfactory, please try again!".localized()//"Chưa đạt yêu cầu, hãy thử lại!"
}
default:
break
}
}
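// Clears the per-session counters and captured frames so recording can restart.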
func resetRecord() {
self.numberPass = 0
self.numberTrue = 0
self.numberFalse = 0
self.space = 0.0
self.dataImageSuccess.removeAll()
self.checkStartRecord = false
self.checkStatusRecord = false
}
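// Crops the central 80% of the frame and converts it back to a CVPixelBuffer
// for the liveness model.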
func resizeImageFace(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer {
let validateView = SBKValidateInput.shared
let imageInput = validateView.convertCVPixelToUIImage(pixelBuffer: pixelBuffer)
// Fall back to the original buffer if any conversion step fails instead of force-unwrapping.
guard let imageOutput = validateView.cropImageFace(image: imageInput, rect: CGRect(x: imageInput.size.width / 10, y: imageInput.size.height / 10, width: imageInput.size.width * 8 / 10, height: imageInput.size.height * 8 / 10), scale: 1.0),
let ciimage = CIImage(image: imageOutput),
let cgimage = CIContext(options: nil).createCGImage(ciimage, from: ciimage.extent),
let buffer = validateView.convertCGImgeToCVPixelBuffer(forImage: cgimage) else { return pixelBuffer }
return buffer
}
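// Estimates head yaw from the 2D landmarks: a bottom contour point (face[5])
// and a nose point (nose[4]) form triangles with the leftmost and rightmost
// contour points; the side holding over 60% of the combined area decides
// TOLEFT/TORIGHT, otherwise the face counts as straight.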
@available(iOS 11.0, *)
func checkFaceRightLeft(landmarks: VNFaceLandmarks2D) -> StatusFace {
guard let face = landmarks.faceContour?.normalizedPoints,
let nose = landmarks.nose?.normalizedPoints,
face.count > 5, nose.count > 4 else { return .ERROR }
let faceLeftPoint = face[0]
let faceRightPoint = face[face.count - 1]
let faceBottomPoint = face[5]
let nosePoint = nose[4]
// Each signed cross product is twice the area of the triangle on that side of the nose.
let sRight = (faceBottomPoint.x - faceLeftPoint.x) * (nosePoint.y - faceLeftPoint.y) - (nosePoint.x - faceLeftPoint.x) * (faceBottomPoint.y - faceLeftPoint.y)
let sLeft = (faceBottomPoint.x - faceRightPoint.x) * (nosePoint.y - faceRightPoint.y) - (nosePoint.x - faceRightPoint.x) * (faceBottomPoint.y - faceRightPoint.y)
let sFaceRight = abs(sRight) / 2
let sFaceLeft = abs(sLeft) / 2
let totalS = sFaceLeft + sFaceRight
if sFaceLeft / totalS > 0.6 {
return .TOLEFT
} else if sFaceRight / totalS > 0.6 {
return .TORIGHT
} else {
return .STRAIGHTFACE
}
}
}
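// Head-pose classification produced by checkFaceRightLeft; ERROR marks an invalid frame.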
enum StatusFace: String {
case TOLEFT
case TORIGHT
case STRAIGHTFACE
case ERROR
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_5" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKRecordFace" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="imageDescription" destination="5Z6-br-pb7" id="4Af-3O-y0d"/>
<outlet property="lbDescription" destination="gxX-Gl-8oL" id="PxG-lz-qKz"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
<outlet property="viewBackground" destination="A99-ZK-7mQ" id="Vhv-Sb-Xq1"/>
<outlet property="viewCheckStep1" destination="Fat-OD-xTH" id="e47-Tn-yx8"/>
<outlet property="viewCheckStep2" destination="zm5-3V-COp" id="3RJ-gk-Qvt"/>
<outlet property="viewCheckStep3" destination="ty6-7l-MQb" id="3t5-39-AJk"/>
<outlet property="viewOval" destination="uc0-TC-E91" id="zHh-td-jnf"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="A99-ZK-7mQ">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="RCk-ff-zkC">
<rect key="frame" x="0.0" y="0.0" width="414" height="44"/>
<color key="backgroundColor" red="0.88784521819999995" green="0.96387571100000002" blue="0.98960381750000004" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="8P2-sh-7ZI">
<rect key="frame" x="0.0" y="44" width="414" height="44"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="9aJ-b2-PTo">
<rect key="frame" x="5" y="7" width="30" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="NiG-9X-lHn"/>
<constraint firstAttribute="width" constant="30" id="iZ5-ee-6k3"/>
</constraints>
<inset key="imageEdgeInsets" minX="9" minY="3" maxX="5" maxY="3"/>
<state key="normal" image="Back.png"/>
<connections>
<action selector="onSelectBackTop:" destination="-1" eventType="touchUpInside" id="WB6-ag-8NC"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Scan your face" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="iF4-Or-Bd6">
<rect key="frame" x="150.33333333333334" y="11.999999999999998" width="113.66666666666666" height="20.333333333333329"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.88784521819999995" green="0.96387571100000002" blue="0.98960381750000004" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="iF4-Or-Bd6" firstAttribute="centerX" secondItem="8P2-sh-7ZI" secondAttribute="centerX" id="3k5-8b-5bd"/>
<constraint firstItem="9aJ-b2-PTo" firstAttribute="leading" secondItem="8P2-sh-7ZI" secondAttribute="leading" constant="5" id="4HC-y0-4jU"/>
<constraint firstAttribute="height" constant="44" id="6zN-iy-kk5"/>
<constraint firstItem="iF4-Or-Bd6" firstAttribute="centerY" secondItem="8P2-sh-7ZI" secondAttribute="centerY" id="W1F-DG-zNi"/>
<constraint firstItem="9aJ-b2-PTo" firstAttribute="centerY" secondItem="8P2-sh-7ZI" secondAttribute="centerY" id="bcM-iZ-xUf"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="uc0-TC-E91">
<rect key="frame" x="60" y="114" width="294" height="536.33333333333337"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</view>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="5" translatesAutoresizingMaskIntoConstraints="NO" id="QfU-Oi-8vZ">
<rect key="frame" x="100" y="670.33333333333337" width="214" height="6"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Fat-OD-xTH">
<rect key="frame" x="0.0" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20000000000000001" green="0.20000000000000001" blue="0.20000000000000001" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="zm5-3V-COp">
<rect key="frame" x="73" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="ty6-7l-MQb">
<rect key="frame" x="146" y="0.0" width="68" height="6"/>
<color key="backgroundColor" red="0.20067420599999999" green="0.20070806150000001" blue="0.20066353679999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
</subviews>
<constraints>
<constraint firstAttribute="height" constant="6" id="G6W-Md-Nwc"/>
</constraints>
</stackView>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Scan-1.png" translatesAutoresizingMaskIntoConstraints="NO" id="5Z6-br-pb7">
<rect key="frame" x="183.66666666666666" y="696.33333333333337" width="46.666666666666657" height="60"/>
<constraints>
<constraint firstAttribute="width" secondItem="5Z6-br-pb7" secondAttribute="height" multiplier="341:437" id="aHk-dc-GVL"/>
<constraint firstAttribute="height" constant="60" id="eEa-r1-Nja"/>
</constraints>
</imageView>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="mYW-LC-gXD">
<rect key="frame" x="172.66666666666666" y="771.33333333333337" width="68.666666666666657" height="20.333333333333371"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Step 3/3" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="AZY-QZ-V1U">
<rect key="frame" x="9.9999999999999964" y="3.0000000000000009" width="48.666666666666657" height="14.333333333333336"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.39954239130000002" green="0.39960256220000001" blue="0.39952337739999999" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="AZY-QZ-V1U" secondAttribute="trailing" constant="10" id="RPu-Tq-inW"/>
<constraint firstAttribute="bottom" secondItem="AZY-QZ-V1U" secondAttribute="bottom" constant="3" id="ZWT-ux-p1V"/>
<constraint firstItem="AZY-QZ-V1U" firstAttribute="top" secondItem="mYW-LC-gXD" secondAttribute="top" constant="3" id="r0p-bq-VBg"/>
<constraint firstItem="AZY-QZ-V1U" firstAttribute="leading" secondItem="mYW-LC-gXD" secondAttribute="leading" constant="10" id="ySz-b9-MBe"/>
</constraints>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="3"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</view>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" position your face in the oval" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="gxX-Gl-8oL">
<rect key="frame" x="10" y="801.66666666666663" width="394" height="20.333333333333371"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.88784521819999995" green="0.96387571100000002" blue="0.98960381750000004" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="5Z6-br-pb7" firstAttribute="centerX" secondItem="TI8-zP-E4o" secondAttribute="centerX" id="0Uk-6w-soF"/>
<constraint firstItem="A99-ZK-7mQ" firstAttribute="top" secondItem="i5M-Pr-FkT" secondAttribute="top" id="AkV-Hq-iHE"/>
<constraint firstItem="TI8-zP-E4o" firstAttribute="trailing" secondItem="8P2-sh-7ZI" secondAttribute="trailing" id="BCt-n2-Jx9"/>
<constraint firstAttribute="trailing" secondItem="A99-ZK-7mQ" secondAttribute="trailing" id="Coq-YU-b0E"/>
<constraint firstItem="A99-ZK-7mQ" firstAttribute="leading" secondItem="i5M-Pr-FkT" secondAttribute="leading" id="Cyr-Uy-b1W"/>
<constraint firstItem="gxX-Gl-8oL" firstAttribute="centerX" secondItem="i5M-Pr-FkT" secondAttribute="centerX" id="Fke-D7-GLi"/>
<constraint firstItem="5Z6-br-pb7" firstAttribute="top" secondItem="QfU-Oi-8vZ" secondAttribute="bottom" constant="20" id="JDz-Uh-8I9"/>
<constraint firstItem="RCk-ff-zkC" firstAttribute="top" secondItem="i5M-Pr-FkT" secondAttribute="top" id="JiH-Ul-mXJ"/>
<constraint firstItem="gxX-Gl-8oL" firstAttribute="top" secondItem="mYW-LC-gXD" secondAttribute="bottom" constant="10" id="Kzd-o1-dtn"/>
<constraint firstItem="uc0-TC-E91" firstAttribute="leading" secondItem="TI8-zP-E4o" secondAttribute="leading" constant="60" id="M3h-uZ-Jta"/>
<constraint firstItem="8P2-sh-7ZI" firstAttribute="top" secondItem="RCk-ff-zkC" secondAttribute="bottom" id="QgH-TC-JzH"/>
<constraint firstItem="QfU-Oi-8vZ" firstAttribute="top" secondItem="uc0-TC-E91" secondAttribute="bottom" constant="20" id="Uop-Th-c8F"/>
<constraint firstItem="TI8-zP-E4o" firstAttribute="bottom" secondItem="gxX-Gl-8oL" secondAttribute="bottom" constant="40" id="VCo-Fz-u6h"/>
<constraint firstItem="QfU-Oi-8vZ" firstAttribute="centerX" secondItem="TI8-zP-E4o" secondAttribute="centerX" id="aMg-a4-rAa"/>
<constraint firstItem="mYW-LC-gXD" firstAttribute="top" secondItem="5Z6-br-pb7" secondAttribute="bottom" constant="15" id="bsa-51-WOA"/>
<constraint firstItem="uc0-TC-E91" firstAttribute="top" secondItem="8P2-sh-7ZI" secondAttribute="bottom" constant="26" id="cPt-cf-ZSL"/>
<constraint firstItem="uc0-TC-E91" firstAttribute="centerX" secondItem="TI8-zP-E4o" secondAttribute="centerX" id="dhi-5U-IZa"/>
<constraint firstItem="8P2-sh-7ZI" firstAttribute="leading" secondItem="TI8-zP-E4o" secondAttribute="leading" id="fg5-R6-1yD"/>
<constraint firstItem="TI8-zP-E4o" firstAttribute="trailing" secondItem="RCk-ff-zkC" secondAttribute="trailing" id="fu5-m8-vUB"/>
<constraint firstItem="8P2-sh-7ZI" firstAttribute="top" secondItem="TI8-zP-E4o" secondAttribute="top" id="fuz-VJ-m7Z"/>
<constraint firstItem="mYW-LC-gXD" firstAttribute="centerX" secondItem="TI8-zP-E4o" secondAttribute="centerX" id="pK9-eV-9dF"/>
<constraint firstAttribute="bottom" secondItem="A99-ZK-7mQ" secondAttribute="bottom" id="rW7-v3-NgY"/>
<constraint firstItem="QfU-Oi-8vZ" firstAttribute="leading" secondItem="TI8-zP-E4o" secondAttribute="leading" constant="100" id="sG1-Bj-6aG"/>
<constraint firstItem="gxX-Gl-8oL" firstAttribute="leading" secondItem="i5M-Pr-FkT" secondAttribute="leading" constant="10" id="tIq-Or-5Bt"/>
<constraint firstItem="RCk-ff-zkC" firstAttribute="leading" secondItem="TI8-zP-E4o" secondAttribute="leading" id="teq-EN-Mp0"/>
</constraints>
<viewLayoutGuide key="safeArea" id="TI8-zP-E4o"/>
<point key="canvasLocation" x="131.25" y="153.16901408450704"/>
</view>
</objects>
<resources>
<image name="Back.png" width="31.5" height="53.5"/>
<image name="Scan-1.png" width="170.5" height="218.5"/>
</resources>
</document>
//
// ResultFaceVC.swift
// SSSSS
//
// Created by itsol on 5/15/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
class SBKResultFaceVC: UIViewController {
@IBOutlet weak var imgFace: UIImageView!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var lbDescription: UILabel!
@IBOutlet weak var lbCopyright: UILabel!
@IBOutlet weak var imagetest1: UIImageView!
@IBOutlet weak var imagetest2: UIImageView!
var imageData: Data?
var dataImageSuccess: [Data] = []
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
var validateImage: Bool = true
var dataPush: [String:Any]?
static let shared = SBKResultFaceVC()
var completionSuccessFaceData: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
}
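// Applies the host app's UI configuration from Global (colors, fonts, copy);
// most of the styling below is currently commented out.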
func loadConfigUI() {
//self.lbCopyright.text = Global.copyright
//self.lbCopyright.textColor = UIColor.colorFromHexa(Global.colorTextPreview)
// self.btnNext.backgroundColor = UIColor.colorFromHexa(Global.colorButtonNext)
// self.btnClose.backgroundColor = UIColor.colorFromHexa(Global.colorButtonBack)
// self.btnNext.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonNext), for: .normal)
// self.btnClose.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonBack), for: .normal)
if Global.frontConfig == "" {
//self.lbCopyright.font = UIFont(name: Global.frontConfig, size: 17)
// self.lbDescription.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnNext.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnClose.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
}
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.loadConfigUI()
let imageCap = UIImage(data: imageData!)
self.imgFace.layer.cornerRadius = 8
let scale = imgFace.frame.width / imageCap!.size.width
let cropImage = self.cropImage(image: imageCap!, rect: CGRect(x: imageCap!.size.width / 10, y: imageCap!.size.height / 10, width: self.imgFace.frame.width, height: self.imgFace.frame.height ), scale: scale)
if Global.typeFace == "TAKEPHOTO" {
self.imgFace.image = cropImage
} else {
self.imgFace.image = imageCap
}
// btnNext.setTitle("Confirm my selfie".localized(), for: .normal)
// btnClose.setTitle("Take a new selfie".localized(), for: .normal)
lbDescription.text = "Make sure your selfie clearly shows your face".localized()
}
// Process the image for display
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = UIImage(data: imageData!)
let widthCrop = imageCap!.size.width - imageCap!.size.width / 4
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: (imageCap!.size.width - imageCap!.size.width / 3) * 4 / 3), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
init() {
super.init(nibName: "SBKResultFaceVC", bundle: Bundle(for: SBKResultFaceVC.self))
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// Proceed to the result screen
@IBAction func onNext(_ sender: Any) {
Loading.shared.showLoading(viewMain: self)
SBOCRRequest.shared.processFace(image: self.imgFace.image!.pngData()!, pathURL: Global.url_face, idBack: self.idBack, idFront: self.idFront) { (errorCode, data) -> Void in
Loading.shared.hideLoading(viewMain: self)
guard errorCode == nil else {
DispatchQueue.main.async {
self.lbDescription.textColor = UIColor.red
self.lbDescription.text = "Server error".localized()
}
return
}
self.completionSuccessFaceData(data!)
}
}
// Return to the face-capture screen
@IBAction func onBack(_ sender: Any) {
self.navigationController?.popViewController(animated: true)
}
@IBAction func onSelectBackTop(_ sender: Any) {
guard let navigationController = self.navigationController else { return }
for controller in navigationController.viewControllers {
if controller.isKind(of: SBKCaptureCardVC.self) {
navigationController.popToViewController(controller, animated: true)
break
}
}
}
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKResultFaceVC" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="btnClose" destination="Ygn-WM-1nI" id="S8H-fc-nJg"/>
<outlet property="btnNext" destination="h5b-6o-YOM" id="3ZL-XS-BpY"/>
<outlet property="imgFace" destination="EZz-Dn-OWc" id="R4R-Ve-st4"/>
<outlet property="lbDescription" destination="K0l-gD-eeu" id="wcZ-PU-fhu"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="IVc-JQ-1gz">
<rect key="frame" x="0.0" y="0.0" width="375" height="44"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="Gab-Lf-PmK">
<rect key="frame" x="5" y="7" width="35" height="30"/>
<constraints>
<constraint firstAttribute="width" constant="35" id="IGx-J3-ZAV"/>
<constraint firstAttribute="height" constant="30" id="fKS-62-cDj"/>
</constraints>
<inset key="imageEdgeInsets" minX="10" minY="5" maxX="10" maxY="5"/>
<state key="normal" image="Back.png"/>
<connections>
<action selector="onSelectBackTop:" destination="-1" eventType="touchUpInside" id="PUQ-5A-Xth"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Scan your face" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Hrc-lO-elK">
<rect key="frame" x="130.5" y="12" width="114" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="Hrc-lO-elK" firstAttribute="centerY" secondItem="IVc-JQ-1gz" secondAttribute="centerY" id="Du0-M1-9uh"/>
<constraint firstItem="Gab-Lf-PmK" firstAttribute="leading" secondItem="IVc-JQ-1gz" secondAttribute="leading" constant="5" id="KeV-ki-dki"/>
<constraint firstItem="Hrc-lO-elK" firstAttribute="centerX" secondItem="IVc-JQ-1gz" secondAttribute="centerX" id="Wym-bz-Zeu"/>
<constraint firstAttribute="height" constant="44" id="isg-eJ-ia7"/>
<constraint firstItem="Gab-Lf-PmK" firstAttribute="centerY" secondItem="IVc-JQ-1gz" secondAttribute="centerY" id="vUO-lU-yGw"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="jTF-UR-ztj">
<rect key="frame" x="0.0" y="64" width="375" height="458"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="EZz-Dn-OWc">
<rect key="frame" x="40" y="32.5" width="295" height="393"/>
<constraints>
<constraint firstAttribute="width" secondItem="EZz-Dn-OWc" secondAttribute="height" multiplier="3:4" id="kyG-JG-X8Y"/>
</constraints>
</imageView>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="EZz-Dn-OWc" firstAttribute="centerY" secondItem="jTF-UR-ztj" secondAttribute="centerY" id="20h-GQ-ca0"/>
<constraint firstItem="EZz-Dn-OWc" firstAttribute="leading" secondItem="jTF-UR-ztj" secondAttribute="leading" constant="40" id="Vxp-ei-hfg"/>
<constraint firstItem="EZz-Dn-OWc" firstAttribute="centerX" secondItem="jTF-UR-ztj" secondAttribute="centerX" id="blQ-aY-p3n"/>
</constraints>
</view>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="K0l-gD-eeu">
<rect key="frame" x="10" y="532" width="355" height="0.0"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="10" translatesAutoresizingMaskIntoConstraints="NO" id="G7p-BC-jg4">
<rect key="frame" x="20" y="577" width="335" height="50"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="Ygn-WM-1nI">
<rect key="frame" x="0.0" y="0.0" width="162.5" height="50"/>
<color key="backgroundColor" systemColor="tertiarySystemGroupedBackgroundColor" red="0.94901960780000005" green="0.94901960780000005" blue="0.96862745100000003" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="boldSystem" pointSize="18"/>
<state key="normal" title="Retake">
<color key="titleColor" red="0.99603885410000004" green="0.46561479569999997" blue="0.0034391828810000002" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="25"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="onBack:" destination="-1" eventType="touchUpInside" id="IAb-0C-ak7"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" semanticContentAttribute="forceRightToLeft" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="h5b-6o-YOM">
<rect key="frame" x="172.5" y="0.0" width="162.5" height="50"/>
<color key="backgroundColor" red="0.98341292140000003" green="0.62477445600000003" blue="0.182400167" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<color key="tintColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<inset key="imageEdgeInsets" minX="10" minY="4" maxX="0.0" maxY="0.0"/>
<state key="normal" title="Confirm" image="Next.png">
<color key="titleColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="25"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="onNext:" destination="-1" eventType="touchUpInside" id="7Au-tL-OpF"/>
</connections>
</button>
</subviews>
<constraints>
<constraint firstAttribute="height" constant="50" id="U4E-iW-Pgb"/>
</constraints>
</stackView>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="IVc-JQ-1gz" secondAttribute="trailing" id="3yE-zI-j42"/>
<constraint firstItem="K0l-gD-eeu" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="8gk-Av-D0o"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="jTF-UR-ztj" secondAttribute="trailing" id="ELj-E5-PcU"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="G7p-BC-jg4" secondAttribute="trailing" constant="20" id="K7S-w9-Csu"/>
<constraint firstItem="jTF-UR-ztj" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" id="Q0C-GZ-u9m"/>
<constraint firstItem="IVc-JQ-1gz" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" id="RRz-Z6-RL0"/>
<constraint firstItem="G7p-BC-jg4" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="20" id="VPW-4V-SrR"/>
<constraint firstItem="jTF-UR-ztj" firstAttribute="top" secondItem="IVc-JQ-1gz" secondAttribute="bottom" constant="20" id="XrM-V0-72h"/>
<constraint firstItem="G7p-BC-jg4" firstAttribute="top" secondItem="K0l-gD-eeu" secondAttribute="bottom" constant="45" id="cfL-Mz-nlh"/>
<constraint firstItem="K0l-gD-eeu" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="10" id="cwR-lR-ot1"/>
<constraint firstItem="IVc-JQ-1gz" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" id="dqy-zw-mvS"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="G7p-BC-jg4" secondAttribute="bottom" constant="40" id="qni-G5-mW6"/>
<constraint firstItem="K0l-gD-eeu" firstAttribute="top" secondItem="jTF-UR-ztj" secondAttribute="bottom" constant="10" id="yOm-E4-Arn"/>
</constraints>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="133" y="131"/>
</view>
</objects>
<resources>
<image name="Back.png" width="31.5" height="53.5"/>
<image name="Next.png" width="30" height="15"/>
</resources>
</document>
//
// ResultCapture.swift
//
// Created by itsol on 5/11/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
class SBKResultCapture: UIViewController {
@IBOutlet weak var imgPhotoCard: UIImageView!
public var imageData: Data?
public var dataCrop: Data?
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var lbDescription: UILabel!
@IBOutlet weak var lbCopyright: UILabel!
var statusScreen: String = "vertical"
var checkScreen: Int = 1
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
var completionSuccessResultCard: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
}
init() {
super.init(nibName: "SBKResultCapture", bundle: Bundle(for: SBKResultCapture.self))
}
func loadConfigUI() {
//self.lbCopyright.text = Global.copyright
//self.lbCopyright.textColor = UIColor.colorFromHexa(Global.colorTextPreview)
self.lbDescription.textColor = UIColor.colorFromHexa(Global.colorTextPreview)
//self.btnNext.backgroundColor = UIColor.colorFromHexa(Global.colorButtonNext)
//self.btnClose.backgroundColor = UIColor.colorFromHexa(Global.colorButtonBack)
//self.btnNext.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonNext), for: .normal)
//self.btnClose.setTitleColor(UIColor.colorFromHexa(Global.colorTextButtonBack), for: .normal)
if Global.frontConfig == "" {
// self.lbCopyright.font = UIFont(name: Global.frontConfig, size: 17)
// self.lbDescription.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnNext.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
// self.btnClose.titleLabel?.font = UIFont(name: Global.frontConfig, size: 17)
}
//btnNext.setTitle("My license is readable".localized() , for: .normal)
// btnClose.setTitle("Take a new picture".localized(), for: .normal)
lbDescription.text = "Make sure your license details are clear to read, with no blur or glare".localized()
}
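// Crops the captured card photo to the on-screen guide box; landscape captures
// are cropped with cropImageHorizontal and rotated 90° before display.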
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.loadConfigUI()
let imageCap = UIImage(data: imageData!)
let scale = imgPhotoCard.frame.width / imageCap!.size.width
var cropImage: UIImage?
if self.statusScreen == "horizontal" {
cropImage = SBKValidateInput.shared.cropImageHorizontal(image: imageCap!, rect: CGRect(x: imageCap!.size.width * 1 / 10, y: imageCap!.size.height * 3 / 20, width: imageCap!.size.width * 8 / 10, height: imageCap!.size.height * 8 / 10), scale: 1.0)!.rotate(radians: .pi / 2)
} else {
cropImage = self.cropImage(image: imageCap!, rect: CGRect(x: imageCap!.size.width / 20, y: imageCap!.size.height / 8 + imageCap!.size.height / 50, width: imageCap!.size.width * 18 / 20, height: imageCap!.size.width * 18 / 20 * 3 / 4 ), scale: scale)
}
dataCrop = cropImage!.pngData()
self.imgPhotoCard.image = cropImage
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// Process the image for display
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x , y: -rect.origin.y ))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
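// Routes to the face step: the photo tutorial when Global.typeFace == "TAKEPHOTO",
// otherwise the video liveness recorder (SBKRecordFace).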
func navigateToFace() {
DispatchQueue.main.async {
if Global.typeFace == "TAKEPHOTO" {
let viewControllers = SBKTutorialFaceVC()
viewControllers.idFront = self.idFront
viewControllers.idBack = self.idBack
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessTutorialFace = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessResultCard(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
} else {
let viewControllers = SBKRecordFace()
viewControllers.idFront = self.idFront
viewControllers.idBack = self.idBack
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessFaceRecord = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessResultCard(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
}
}
}
// Validate the image and send the request: store the crop, then continue to the
// back-of-card capture (when Global.step == "ALL") or straight to the face step
@IBAction func onResquest(_ sender: Any) {
if self.checkScreen == 1 {
Global.imageCard1 = dataCrop!
if Global.step == "ALL" {
DispatchQueue.main.async {
let viewControllers = SBKCaptureCardVC()
viewControllers.descriptionScreen = "Back of your personal card".localized()
viewControllers.checkScreen = 2
viewControllers.idFront = self.idFront
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessCard = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessResultCard(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
}
} else {
self.navigateToFace()
}
} else {
Global.imageCard2 = dataCrop!
self.navigateToFace()
}
}
// Return to the capture screen
@IBAction func onBack(_ sender: Any) {
self.navigationController?.popViewController(animated: true)
}
@IBAction func onSelectBackTop(_ sender: Any) {
guard let navigationController = self.navigationController else { return }
for controller in navigationController.viewControllers {
if controller.isKind(of: SBKCaptureCardVC.self) {
navigationController.popToViewController(controller, animated: true)
break
}
}
}
}
extension UIImage {
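// Returns the image rotated by `radians`, drawn centered in a context sized to
// the rotated bounding box (floored to avoid Core Graphics rounding artifacts).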
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
// Trim off the extremely small float value to prevent core graphics from rounding it up
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
// Move origin to middle
context.translateBy(x: newSize.width/2, y: newSize.height/2)
// Rotate around middle
context.rotate(by: CGFloat(radians))
// Draw the image at its center
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina4_0" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKResultCapture" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="btnClose" destination="N4O-FJ-vqe" id="fv4-X8-AoR"/>
<outlet property="btnNext" destination="whP-d3-flm" id="coE-dr-yZm"/>
<outlet property="imgPhotoCard" destination="IMd-rw-iRb" id="Alc-1L-dtq"/>
<outlet property="lbCopyright" destination="cKc-Rf-YxK" id="cTB-kz-zEe"/>
<outlet property="lbDescription" destination="er2-v1-Th5" id="kZU-DS-aQH"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="uyc-Hd-7Jb">
<rect key="frame" x="5" y="5" width="40" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="IUg-nz-Giq"/>
<constraint firstAttribute="width" constant="40" id="xTb-Ug-kQU"/>
</constraints>
<inset key="imageEdgeInsets" minX="15" minY="5" maxX="10" maxY="5"/>
<state key="normal" image="Back.png"/>
<connections>
<action selector="onSelectBackTop:" destination="-1" eventType="touchUpInside" id="Dfx-oO-UuR"/>
</connections>
</button>
<label hidden="YES" opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="cKc-Rf-YxK">
<rect key="frame" x="160" y="80" width="0.0" height="0.0"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="IMd-rw-iRb">
<rect key="frame" x="20" y="120" width="280" height="210"/>
<constraints>
<constraint firstAttribute="width" secondItem="IMd-rw-iRb" secondAttribute="height" multiplier="4:3" id="YKq-L9-E3E"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="er2-v1-Th5">
<rect key="frame" x="20" y="433" width="280" height="0.0"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="10" translatesAutoresizingMaskIntoConstraints="NO" id="hKU-OV-8Xz">
<rect key="frame" x="20" y="478" width="280" height="50"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="N4O-FJ-vqe">
<rect key="frame" x="0.0" y="0.0" width="135" height="50"/>
<color key="backgroundColor" systemColor="tertiarySystemBackgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<fontDescription key="fontDescription" type="boldSystem" pointSize="17"/>
<state key="normal" title="Retake">
<color key="titleColor" red="0.99281400440000001" green="0.49675828220000001" blue="0.085976324980000005" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="25"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="onBack:" destination="-1" eventType="touchUpInside" id="zbK-Eg-z3L"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" semanticContentAttribute="forceRightToLeft" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="whP-d3-flm">
<rect key="frame" x="145" y="0.0" width="135" height="50"/>
<color key="backgroundColor" red="0.97634154559999997" green="0.56879520419999996" blue="0.16075372700000001" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="boldSystem" pointSize="18"/>
<inset key="imageEdgeInsets" minX="10" minY="0.0" maxX="0.0" maxY="0.0"/>
<state key="normal" title="Confirm" image="Next.png">
<color key="titleColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="25"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="onResquest:" destination="-1" eventType="touchUpInside" id="BK3-9t-zNO"/>
</connections>
</button>
</subviews>
<constraints>
<constraint firstAttribute="height" constant="50" id="Ci9-Po-j1o"/>
</constraints>
</stackView>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="cKc-Rf-YxK" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" constant="80" id="0Xs-6w-nMv"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="leading" secondItem="er2-v1-Th5" secondAttribute="leading" constant="-20" id="1Lg-Pe-Lct"/>
<constraint firstItem="hKU-OV-8Xz" firstAttribute="top" secondItem="er2-v1-Th5" secondAttribute="bottom" constant="45" id="36o-ZU-HmX"/>
<constraint firstItem="hKU-OV-8Xz" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="20" id="6ji-8a-EVE"/>
<constraint firstItem="uyc-Hd-7Jb" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="5" id="87L-29-Scz"/>
<constraint firstItem="er2-v1-Th5" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="DW0-Ay-p5l"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="hKU-OV-8Xz" secondAttribute="bottom" constant="40" id="Ddb-Pw-GTx"/>
<constraint firstItem="IMd-rw-iRb" firstAttribute="top" secondItem="cKc-Rf-YxK" secondAttribute="bottom" constant="40" id="Hmq-XT-RXq"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="IMd-rw-iRb" secondAttribute="trailing" constant="20" id="IvA-sL-Xwm"/>
<constraint firstItem="cKc-Rf-YxK" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="Q5c-vK-ezA"/>
<constraint firstItem="hKU-OV-8Xz" firstAttribute="centerX" secondItem="fnl-2z-Ty3" secondAttribute="centerX" id="ZpP-oW-71S"/>
<constraint firstItem="IMd-rw-iRb" firstAttribute="leading" secondItem="i5M-Pr-FkT" secondAttribute="leading" constant="20" id="je7-ie-JdT"/>
<constraint firstItem="uyc-Hd-7Jb" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" constant="5" id="kai-jC-edg"/>
<constraint firstItem="er2-v1-Th5" firstAttribute="top" relation="greaterThanOrEqual" secondItem="IMd-rw-iRb" secondAttribute="bottom" constant="10" id="vzk-mC-JPg"/>
</constraints>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="133" y="154"/>
</view>
</objects>
<resources>
<image name="Back.png" width="31.5" height="53.5"/>
<image name="Next.png" width="30" height="15"/>
</resources>
</document>
//
// TutorialVC.swift
//
// Created by itsol on 5/11/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
class SBKTutorialVC: UIViewController {
@IBOutlet weak var btnProceed: UIButton!
var URLToken: String = ""
var completionSuccessTutorial: ([String:Any]) -> Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
// var image = UIImage(named: "back", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
// image = image?.withRenderingMode(.alwaysOriginal)
//
// let button = UIButton(type: .system)
// button.setImage(image, for: .normal)
// button.setTitle("Back".localized(), for: .normal)
// button.imageEdgeInsets.left = -5
// button.setTitleColor(UIColor.black, for: .normal)
// button.sizeToFit()
// button.addTarget(self, action: #selector(self.back), for: .touchUpInside)
//
// let newBackButton = UIBarButtonItem(customView: button)
// self.navigationItem.leftBarButtonItem = newBackButton
// btnProceed.imageView?.contentMode = .scaleAspectFit
// btnProceed.imageEdgeInsets = UIEdgeInsets(top: 100, left: 200, bottom: 100, right: 200)
}
// Proceed to capturing the front of the card
@IBAction func onProceedToCaptureID(_ sender: Any) {
let controller = SBKCaptureCardVC()
controller.URLToken = self.URLToken
controller.completionSuccessCard = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessTutorial(data)
}
self.navigationController?.pushViewController(controller, animated: true)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// btnProceed.setTitle("Proceed to Capture ID".localized(), for: .normal)
// btnProceed.backgroundColor = UIColor.colorFromHexa(Global.colorButtonTutorial)
}
@IBAction func onSelectBack(_ sender: Any) {
self.navigationController?.popViewController(animated: true)
}
// Back-button event
@objc func back(sender: UIBarButtonItem) {
self.navigationController?.popViewController(animated: true)
}
init(URLToken: String) {
super.init(nibName: "SBKTutorialVC", bundle: Bundle(for: SBKTutorialVC.self))
self.URLToken = URLToken
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_5" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKTutorialVC" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="btnProceed" destination="76Z-UO-zeX" id="evL-Pr-yDW"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<stackView opaque="NO" contentMode="scaleToFill" axis="vertical" distribution="equalSpacing" translatesAutoresizingMaskIntoConstraints="NO" id="naM-63-c8r">
<rect key="frame" x="0.0" y="44" width="414" height="818"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Egn-jr-y8Q">
<rect key="frame" x="0.0" y="0.0" width="414" height="260.33333333333331"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="9Z2-gr-YPL">
<rect key="frame" x="5.0000000000000018" y="10" width="31.666666666666671" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="nI5-d9-mWW"/>
</constraints>
<inset key="imageEdgeInsets" minX="11" minY="5" maxX="7" maxY="5"/>
<state key="normal" image="Back.png"/>
<connections>
<action selector="onSelectBack:" destination="-1" eventType="touchUpInside" id="Kvk-tK-sRy"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" misplaced="YES" text="We support scan following document types" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="5zV-az-SvM">
<rect key="frame" x="10" y="82" width="394" height="62.333333333333329"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<stackView opaque="NO" contentMode="scaleToFill" distribution="equalSpacing" translatesAutoresizingMaskIntoConstraints="NO" id="Reg-bd-ohy">
<rect key="frame" x="50" y="170.33333333333334" width="314" height="50"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Card-2.png" translatesAutoresizingMaskIntoConstraints="NO" id="VPm-2o-fQT">
<rect key="frame" x="0.0" y="0.0" width="50" height="50"/>
<constraints>
<constraint firstAttribute="height" constant="50" id="Qcw-a3-Zx2"/>
<constraint firstAttribute="width" constant="50" id="vlU-Yq-RsR"/>
</constraints>
</imageView>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Car-2 copy.png" translatesAutoresizingMaskIntoConstraints="NO" id="NjS-dY-Jqt">
<rect key="frame" x="132" y="0.0" width="50" height="50"/>
<constraints>
<constraint firstAttribute="height" constant="50" id="eDc-An-55k"/>
<constraint firstAttribute="width" constant="50" id="wFt-R5-9QV"/>
</constraints>
</imageView>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Passport-2 copy.png" translatesAutoresizingMaskIntoConstraints="NO" id="QAq-eW-MtG">
<rect key="frame" x="264" y="0.0" width="50" height="50"/>
<constraints>
<constraint firstAttribute="width" constant="50" id="gHB-T1-ueG"/>
<constraint firstAttribute="height" constant="50" id="gzA-WP-r8q"/>
</constraints>
</imageView>
</subviews>
</stackView>
</subviews>
<color key="backgroundColor" red="0.89469248059999995" green="0.96409291029999999" blue="0.9894767404" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="9Z2-gr-YPL" firstAttribute="leading" secondItem="Egn-jr-y8Q" secondAttribute="leading" constant="5" id="CvC-zG-fom"/>
<constraint firstAttribute="bottom" secondItem="Reg-bd-ohy" secondAttribute="bottom" constant="40" id="UAr-Fu-Jyl"/>
<constraint firstItem="Reg-bd-ohy" firstAttribute="leading" secondItem="Egn-jr-y8Q" secondAttribute="leading" constant="50" id="arF-5E-K8C"/>
<constraint firstItem="5zV-az-SvM" firstAttribute="leading" secondItem="Egn-jr-y8Q" secondAttribute="leading" constant="10" id="l4m-ak-kpS"/>
<constraint firstItem="Reg-bd-ohy" firstAttribute="top" secondItem="5zV-az-SvM" secondAttribute="bottom" constant="10" id="p7A-Dt-qDA"/>
<constraint firstItem="5zV-az-SvM" firstAttribute="top" secondItem="9Z2-gr-YPL" secondAttribute="bottom" constant="40" id="rwl-rN-lKg"/>
<constraint firstAttribute="trailing" secondItem="Reg-bd-ohy" secondAttribute="trailing" constant="50" id="wtc-Ui-ddG"/>
<constraint firstItem="5zV-az-SvM" firstAttribute="centerX" secondItem="Egn-jr-y8Q" secondAttribute="centerX" id="ycj-7C-Ck6"/>
<constraint firstItem="9Z2-gr-YPL" firstAttribute="top" secondItem="Egn-jr-y8Q" secondAttribute="top" constant="10" id="zyc-tM-TZx"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="8Yt-Qx-ATJ">
<rect key="frame" x="0.0" y="329.33333333333331" width="414" height="319.99999999999994"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="To verify one's identity, follow the tips below" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Wr6-Yc-KZF">
<rect key="frame" x="20" y="10.000000000000004" width="374" height="58.333333333333343"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="10" translatesAutoresizingMaskIntoConstraints="NO" id="m3X-6L-a5Q">
<rect key="frame" x="10" y="88.333333333333357" width="394" height="211.66666666666663"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="V2d-3w-egK">
<rect key="frame" x="0.0" y="0.0" width="124.66666666666667" height="211.66666666666666"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Place within the box.png" translatesAutoresizingMaskIntoConstraints="NO" id="jha-tX-Ply">
<rect key="frame" x="0.0" y="0.0" width="124.66666666666667" height="150"/>
<constraints>
<constraint firstAttribute="height" constant="150" id="N3d-EG-uqC"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Place within the box" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="zKV-7d-S9Z">
<rect key="frame" x="0.0" y="154.99999999999994" width="124.66666666666667" height="46.666666666666657"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="zKV-7d-S9Z" firstAttribute="top" secondItem="jha-tX-Ply" secondAttribute="bottom" constant="5" id="0gF-Zp-9Eg"/>
<constraint firstItem="jha-tX-Ply" firstAttribute="top" secondItem="V2d-3w-egK" secondAttribute="top" id="CNG-ni-qDi"/>
<constraint firstItem="jha-tX-Ply" firstAttribute="centerX" secondItem="V2d-3w-egK" secondAttribute="centerX" id="Gxe-UO-cGV"/>
<constraint firstAttribute="trailing" secondItem="zKV-7d-S9Z" secondAttribute="trailing" id="JJj-K3-Svm"/>
<constraint firstItem="jha-tX-Ply" firstAttribute="leading" secondItem="V2d-3w-egK" secondAttribute="leading" id="d3Y-cn-nJG"/>
<constraint firstAttribute="bottom" secondItem="zKV-7d-S9Z" secondAttribute="bottom" constant="10" id="jHE-c9-7KT"/>
<constraint firstItem="zKV-7d-S9Z" firstAttribute="leading" secondItem="V2d-3w-egK" secondAttribute="leading" id="tXe-2P-HA4"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Ejd-uk-YRU">
<rect key="frame" x="134.66666666666666" y="0.0" width="124.66666666666666" height="211.66666666666666"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Do not place outside.png" translatesAutoresizingMaskIntoConstraints="NO" id="i67-SP-e2c">
<rect key="frame" x="0.0" y="0.0" width="124.66666666666667" height="150"/>
<constraints>
<constraint firstAttribute="height" constant="150" id="mCO-kV-CmI"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Do not place outside" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="NCX-HI-DmM">
<rect key="frame" x="0.0" y="154.99999999999994" width="124.66666666666667" height="46.666666666666657"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="NCX-HI-DmM" firstAttribute="top" secondItem="i67-SP-e2c" secondAttribute="bottom" constant="5" id="A45-4M-oWc"/>
<constraint firstAttribute="bottom" secondItem="NCX-HI-DmM" secondAttribute="bottom" constant="10" id="BCI-ji-a8n"/>
<constraint firstItem="i67-SP-e2c" firstAttribute="top" secondItem="Ejd-uk-YRU" secondAttribute="top" id="CQI-1J-Hgp"/>
<constraint firstAttribute="trailing" secondItem="NCX-HI-DmM" secondAttribute="trailing" id="CWh-Ov-x9i"/>
<constraint firstItem="i67-SP-e2c" firstAttribute="leading" secondItem="Ejd-uk-YRU" secondAttribute="leading" id="J11-Z5-hbT"/>
<constraint firstAttribute="trailing" secondItem="i67-SP-e2c" secondAttribute="trailing" id="UkC-K8-y9H"/>
<constraint firstItem="NCX-HI-DmM" firstAttribute="leading" secondItem="Ejd-uk-YRU" secondAttribute="leading" id="lJZ-Jc-bho"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="LIh-la-7y5">
<rect key="frame" x="269.33333333333331" y="0.0" width="124.66666666666669" height="211.66666666666666"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Avoid glare.png" translatesAutoresizingMaskIntoConstraints="NO" id="C0o-3I-u7c">
<rect key="frame" x="0.0" y="0.0" width="124.66666666666667" height="150"/>
<constraints>
<constraint firstAttribute="height" constant="150" id="JGu-r4-yPN"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Avoid glare" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="vzC-Ny-HhW">
<rect key="frame" x="0.0" y="154.99999999999994" width="124.66666666666667" height="46.666666666666657"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstAttribute="bottom" secondItem="vzC-Ny-HhW" secondAttribute="bottom" constant="10" id="Fcl-GZ-xQK"/>
<constraint firstItem="vzC-Ny-HhW" firstAttribute="leading" secondItem="LIh-la-7y5" secondAttribute="leading" id="MPX-T8-Sal"/>
<constraint firstItem="C0o-3I-u7c" firstAttribute="top" secondItem="LIh-la-7y5" secondAttribute="top" id="RJw-qm-144"/>
<constraint firstAttribute="trailing" secondItem="C0o-3I-u7c" secondAttribute="trailing" id="S5b-CX-POG"/>
<constraint firstItem="C0o-3I-u7c" firstAttribute="leading" secondItem="LIh-la-7y5" secondAttribute="leading" id="rPq-e1-RU7"/>
<constraint firstAttribute="trailing" secondItem="vzC-Ny-HhW" secondAttribute="trailing" id="uYE-z2-6rS"/>
<constraint firstItem="vzC-Ny-HhW" firstAttribute="top" secondItem="C0o-3I-u7c" secondAttribute="bottom" constant="5" id="xfk-Y8-eMc"/>
</constraints>
</view>
</subviews>
</stackView>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="m3X-6L-a5Q" firstAttribute="leading" secondItem="8Yt-Qx-ATJ" secondAttribute="leading" constant="10" id="ABq-Wg-xoM"/>
<constraint firstItem="m3X-6L-a5Q" firstAttribute="centerX" secondItem="8Yt-Qx-ATJ" secondAttribute="centerX" id="KdF-op-tbT"/>
<constraint firstItem="Wr6-Yc-KZF" firstAttribute="leading" secondItem="8Yt-Qx-ATJ" secondAttribute="leading" constant="20" id="Sn6-Vn-wsR"/>
<constraint firstItem="m3X-6L-a5Q" firstAttribute="top" secondItem="Wr6-Yc-KZF" secondAttribute="bottom" constant="20" id="WwF-dX-jDB"/>
<constraint firstAttribute="bottom" secondItem="m3X-6L-a5Q" secondAttribute="bottom" constant="20" id="YsV-JH-2G3"/>
<constraint firstItem="Wr6-Yc-KZF" firstAttribute="centerX" secondItem="8Yt-Qx-ATJ" secondAttribute="centerX" id="hlh-Ri-f6O"/>
<constraint firstItem="Wr6-Yc-KZF" firstAttribute="top" secondItem="8Yt-Qx-ATJ" secondAttribute="top" constant="10" id="i3q-6J-12G"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="N3E-7I-DJ4">
<rect key="frame" x="0.0" y="718" width="414" height="100"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" semanticContentAttribute="forceRightToLeft" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="76Z-UO-zeX">
<rect key="frame" x="30" y="0.0" width="354" height="60"/>
<color key="backgroundColor" red="0.97999650240000002" green="0.61803328989999995" blue="0.265196979" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="60" id="CnQ-pJ-WEj"/>
</constraints>
<fontDescription key="fontDescription" type="boldSystem" pointSize="21"/>
<inset key="imageEdgeInsets" minX="15" minY="0.0" maxX="0.0" maxY="0.0"/>
<state key="normal" title="Start" image="Next.png">
<color key="titleColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="30"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="onProceedToCaptureID:" destination="-1" eventType="touchUpInside" id="AOi-Tc-hDQ"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="76Z-UO-zeX" firstAttribute="leading" secondItem="N3E-7I-DJ4" secondAttribute="leading" constant="30" id="KSM-aM-TyU"/>
<constraint firstItem="76Z-UO-zeX" firstAttribute="top" secondItem="N3E-7I-DJ4" secondAttribute="top" id="U01-bN-F1w"/>
<constraint firstItem="76Z-UO-zeX" firstAttribute="centerX" secondItem="N3E-7I-DJ4" secondAttribute="centerX" id="WAl-XI-ydg"/>
<constraint firstAttribute="height" constant="100" id="g3p-jK-Os4"/>
</constraints>
</view>
</subviews>
<constraints>
<constraint firstItem="Egn-jr-y8Q" firstAttribute="leading" secondItem="naM-63-c8r" secondAttribute="leading" id="drN-A2-DDq"/>
<constraint firstAttribute="trailing" secondItem="Egn-jr-y8Q" secondAttribute="trailing" id="xgn-M8-g6a"/>
</constraints>
</stackView>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="naM-63-c8r" firstAttribute="trailing" secondItem="fnl-2z-Ty3" secondAttribute="trailing" id="4na-RD-0w6"/>
<constraint firstItem="naM-63-c8r" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" id="AGr-jy-DpK"/>
<constraint firstItem="naM-63-c8r" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" id="ghF-YC-kA3"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="naM-63-c8r" secondAttribute="bottom" id="mgA-Kd-cV3"/>
</constraints>
<nil key="simulatedTopBarMetrics"/>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="131.8840579710145" y="93.75"/>
</view>
</objects>
<resources>
<image name="Avoid glare.png" width="341" height="390"/>
<image name="Back.png" width="31.5" height="53.5"/>
<image name="Car-2 copy.png" width="155" height="144"/>
<image name="Card-2.png" width="149.5" height="130.5"/>
<image name="Do not place outside.png" width="341" height="390"/>
<image name="Next.png" width="30" height="15"/>
<image name="Passport-2 copy.png" width="118.5" height="123.5"/>
<image name="Place within the box.png" width="341" height="390.5"/>
</resources>
</document>
//
// TutorialFaceVC.swift
// SSSSS
//
// Created by itsol on 5/15/20.
// Copyright © 2020 itsol. All rights reserved.
//
import UIKit
class SBKTutorialFaceVC: UIViewController {
@IBOutlet weak var brightLight: UILabel!
@IBOutlet weak var lbNoHat: UILabel!
@IBOutlet weak var lbNoGlasses: UILabel!
@IBOutlet weak var btnNext: UIButton!
var idFront: String = ""
var idBack: String = ""
var URLToken: String = ""
var completionSuccessTutorialFace: ([String:Any])->Void = {_ in}
override func viewDidLoad() {
super.viewDidLoad()
//Set up the custom Back button
var image = UIImage(named: "back", in: Bundle(for: SBKTutorialVC.self), compatibleWith: nil)
image = image?.withRenderingMode(.alwaysOriginal)
let button = UIButton(type: .system)
button.setImage(image, for: .normal)
button.setTitle("Back".localized(), for: .normal)
button.sizeToFit()
button.imageEdgeInsets.left = -5
button.setTitleColor(UIColor.black, for: .normal)
button.addTarget(self, action: #selector(self.back), for: .touchUpInside)
let newBackButton = UIBarButtonItem(customView: button)
self.navigationItem.leftBarButtonItem = newBackButton
}
//Handle back: pop to the card-capture screen if it is on the stack
@objc func back(sender: UIBarButtonItem) {
guard let controllers = self.navigationController?.viewControllers else { return }
for controller in controllers {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController?.popToViewController(controller, animated: true)
break
}
}
}
override func viewDidAppear(_ animated: Bool) {
self.lbNoHat.text = "No Hat".localized()
self.lbNoGlasses.text = "No Glasses".localized()
self.brightLight.text = "Bright Light".localized()
// self.btnNext.setTitle("Proceed to Take Selfie".localized(), for: .normal)
//self.btnNext.backgroundColor = UIColor.colorFromHexa(Global.colorButtonTutorial)
}
init() {
super.init(nibName: "SBKTutorialFaceVC", bundle: Bundle(for: SBKTutorialFaceVC.self))
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
//Proceed to the face-capture screen
@IBAction func onStart(_ sender: Any) {
DispatchQueue.main.async {
let viewControllers = SBKCaptureFaceVC()
viewControllers.idFront = self.idFront
viewControllers.idBack = self.idBack
viewControllers.URLToken = self.URLToken
viewControllers.completionSuccessFace = { [weak self] data in
guard let `self` = self else { return }
self.completionSuccessTutorialFace(data)
}
self.navigationController?.pushViewController(viewControllers, animated: true)
}
}
}
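A minimal usage sketch (not part of the source): a host app pushes SBKTutorialFaceVC and consumes its completion callback. The ID placeholders and the surrounding navigation controller are hypothetical.
let tutorial = SBKTutorialFaceVC()
tutorial.idFront = "<front-id>"    // hypothetical IDs from the earlier card-capture step
tutorial.idBack = "<back-id>"
tutorial.completionSuccessTutorialFace = { data in
    print("face verification response:", data)
}
navigationController?.pushViewController(tutorial, animated: true)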
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="SBKTutorialFaceVC" customModule="SB_KYC_SDK" customModuleProvider="target">
<connections>
<outlet property="brightLight" destination="LEf-mb-EEw" id="nK8-5V-eLG"/>
<outlet property="btnNext" destination="BHJ-nE-nba" id="Zbt-pH-Uxk"/>
<outlet property="lbNoGlasses" destination="OBc-kz-bve" id="x7p-Ig-BOm"/>
<outlet property="lbNoHat" destination="0uW-uu-fti" id="FAP-fX-vtS"/>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="OPE-9D-Ejt">
<rect key="frame" x="0.0" y="0.0" width="375" height="44"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="qLe-oj-7mV">
<rect key="frame" x="5" y="7" width="40" height="30"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstAttribute="width" constant="40" id="74I-lK-mGx"/>
<constraint firstAttribute="height" constant="30" id="T4K-9i-L3f"/>
</constraints>
<inset key="imageEdgeInsets" minX="15" minY="5" maxX="9" maxY="5"/>
<state key="normal" image="Back.png"/>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Scan your face" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Qqj-sU-jQT">
<rect key="frame" x="130.5" y="12" width="114" height="20.5"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="Qqj-sU-jQT" firstAttribute="centerX" secondItem="OPE-9D-Ejt" secondAttribute="centerX" id="4EY-Ii-nir"/>
<constraint firstItem="Qqj-sU-jQT" firstAttribute="centerY" secondItem="OPE-9D-Ejt" secondAttribute="centerY" id="ClW-a7-Ccy"/>
<constraint firstItem="qLe-oj-7mV" firstAttribute="centerY" secondItem="OPE-9D-Ejt" secondAttribute="centerY" id="F30-bn-MCO"/>
<constraint firstAttribute="height" constant="44" id="bUG-3B-1Ee"/>
<constraint firstItem="qLe-oj-7mV" firstAttribute="leading" secondItem="OPE-9D-Ejt" secondAttribute="leading" constant="5" id="vrm-xe-j2q"/>
</constraints>
</view>
<stackView opaque="NO" contentMode="scaleToFill" axis="vertical" distribution="equalSpacing" spacing="20" translatesAutoresizingMaskIntoConstraints="NO" id="DGc-C0-sHE">
<rect key="frame" x="0.0" y="74" width="375" height="493"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="B0b-8u-syU">
<rect key="frame" x="0.0" y="0.0" width="375" height="288.5"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Holdphone.png" translatesAutoresizingMaskIntoConstraints="NO" id="a2K-28-0bh">
<rect key="frame" x="0.0" y="0.0" width="375" height="200"/>
<constraints>
<constraint firstAttribute="height" constant="200" id="rqs-Rl-NNv"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="HOLD PHONE IN FRONT OF YOU" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="UHt-VG-Eqt">
<rect key="frame" x="60.5" y="220" width="254.5" height="68.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="UHt-VG-Eqt" firstAttribute="top" secondItem="a2K-28-0bh" secondAttribute="bottom" constant="20" id="83m-FQ-w5O"/>
<constraint firstItem="a2K-28-0bh" firstAttribute="leading" secondItem="B0b-8u-syU" secondAttribute="leading" id="9iG-MJ-Es4"/>
<constraint firstItem="a2K-28-0bh" firstAttribute="top" secondItem="B0b-8u-syU" secondAttribute="top" id="Fog-W0-gJ3"/>
<constraint firstAttribute="trailing" secondItem="a2K-28-0bh" secondAttribute="trailing" id="HgR-09-Iy9"/>
<constraint firstAttribute="bottom" secondItem="UHt-VG-Eqt" secondAttribute="bottom" id="L6W-8m-H4Z"/>
<constraint firstItem="UHt-VG-Eqt" firstAttribute="centerX" secondItem="B0b-8u-syU" secondAttribute="centerX" id="uso-A0-SMX"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="L6w-YX-Eky">
<rect key="frame" x="0.0" y="308.5" width="375" height="184.5"/>
<subviews>
<stackView opaque="NO" contentMode="scaleToFill" distribution="fillEqually" spacing="10" translatesAutoresizingMaskIntoConstraints="NO" id="xhU-Sx-ZlE">
<rect key="frame" x="20" y="0.0" width="335" height="184.5"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="MrI-x8-Aaa">
<rect key="frame" x="0.0" y="0.0" width="105" height="184.5"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Brighness.png" translatesAutoresizingMaskIntoConstraints="NO" id="Dhh-FJ-NIO">
<rect key="frame" x="0.0" y="0.0" width="105" height="150"/>
<constraints>
<constraint firstAttribute="width" secondItem="Dhh-FJ-NIO" secondAttribute="height" multiplier="1051:1501" id="jrY-2l-0ya"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Bright Light" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="LEf-mb-EEw">
<rect key="frame" x="22" y="160" width="61.5" height="14.5"/>
<fontDescription key="fontDescription" type="system" pointSize="11"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="Dhh-FJ-NIO" firstAttribute="top" secondItem="MrI-x8-Aaa" secondAttribute="top" id="48p-RI-bwB"/>
<constraint firstItem="Dhh-FJ-NIO" firstAttribute="leading" secondItem="MrI-x8-Aaa" secondAttribute="leading" id="4HQ-Fw-ged"/>
<constraint firstItem="LEf-mb-EEw" firstAttribute="top" secondItem="Dhh-FJ-NIO" secondAttribute="bottom" constant="10" id="GMR-Of-a6U"/>
<constraint firstAttribute="bottom" secondItem="LEf-mb-EEw" secondAttribute="bottom" constant="10" id="L12-78-Ny1"/>
<constraint firstItem="LEf-mb-EEw" firstAttribute="centerX" secondItem="Dhh-FJ-NIO" secondAttribute="centerX" id="SsU-F8-CON"/>
<constraint firstAttribute="trailing" secondItem="Dhh-FJ-NIO" secondAttribute="trailing" id="fBO-pe-gQ6"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="QZ4-kf-Ifk">
<rect key="frame" x="115" y="0.0" width="105" height="184.5"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Glasses.png" translatesAutoresizingMaskIntoConstraints="NO" id="fwj-oM-0L2">
<rect key="frame" x="0.0" y="0.0" width="105" height="150"/>
<constraints>
<constraint firstAttribute="width" secondItem="fwj-oM-0L2" secondAttribute="height" multiplier="1051:1501" id="9fP-E7-kfu"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="No Glasses" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="OBc-kz-bve">
<rect key="frame" x="20.5" y="158" width="64" height="16.5"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="OBc-kz-bve" firstAttribute="centerX" secondItem="fwj-oM-0L2" secondAttribute="centerX" id="ELr-hA-CFx"/>
<constraint firstItem="fwj-oM-0L2" firstAttribute="top" secondItem="QZ4-kf-Ifk" secondAttribute="top" id="FfR-nc-pOu"/>
<constraint firstItem="fwj-oM-0L2" firstAttribute="leading" secondItem="QZ4-kf-Ifk" secondAttribute="leading" id="N4L-eY-8zr"/>
<constraint firstItem="OBc-kz-bve" firstAttribute="top" secondItem="fwj-oM-0L2" secondAttribute="bottom" constant="8" id="Um8-fB-uKa"/>
<constraint firstAttribute="bottom" secondItem="OBc-kz-bve" secondAttribute="bottom" constant="10" id="Uzv-Eo-1NX"/>
<constraint firstAttribute="trailing" secondItem="fwj-oM-0L2" secondAttribute="trailing" id="e8R-6c-hmd"/>
</constraints>
</view>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Ovc-eA-f7D">
<rect key="frame" x="230" y="0.0" width="105" height="184.5"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="Hat.png" translatesAutoresizingMaskIntoConstraints="NO" id="ALy-Nt-7ep">
<rect key="frame" x="0.0" y="0.0" width="105" height="150"/>
<constraints>
<constraint firstAttribute="width" secondItem="ALy-Nt-7ep" secondAttribute="height" multiplier="1051:1501" id="nnU-z5-j56"/>
</constraints>
</imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="No Hat" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="0uW-uu-fti">
<rect key="frame" x="33" y="160" width="39.5" height="14.5"/>
<fontDescription key="fontDescription" type="system" pointSize="12"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="ALy-Nt-7ep" secondAttribute="trailing" id="2Xc-Q6-RVU"/>
<constraint firstItem="0uW-uu-fti" firstAttribute="top" secondItem="ALy-Nt-7ep" secondAttribute="bottom" constant="10" id="9jv-dK-PbZ"/>
<constraint firstItem="0uW-uu-fti" firstAttribute="centerX" secondItem="ALy-Nt-7ep" secondAttribute="centerX" id="Nbr-K2-0rN"/>
<constraint firstItem="ALy-Nt-7ep" firstAttribute="top" secondItem="Ovc-eA-f7D" secondAttribute="top" id="bgu-w9-gNP"/>
<constraint firstItem="ALy-Nt-7ep" firstAttribute="leading" secondItem="Ovc-eA-f7D" secondAttribute="leading" id="tiZ-o4-bS3"/>
<constraint firstAttribute="bottom" secondItem="0uW-uu-fti" secondAttribute="bottom" constant="10" id="wlR-0Q-niz"/>
</constraints>
</view>
</subviews>
<color key="tintColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</stackView>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<constraints>
<constraint firstItem="xhU-Sx-ZlE" firstAttribute="top" secondItem="L6w-YX-Eky" secondAttribute="top" id="EJ3-Pa-Vfk"/>
<constraint firstAttribute="trailing" secondItem="xhU-Sx-ZlE" secondAttribute="trailing" constant="20" id="UOl-ms-UoR"/>
<constraint firstItem="xhU-Sx-ZlE" firstAttribute="leading" secondItem="L6w-YX-Eky" secondAttribute="leading" constant="20" id="kbF-cL-6iC"/>
<constraint firstAttribute="bottom" secondItem="xhU-Sx-ZlE" secondAttribute="bottom" id="p4d-Mm-mPl"/>
</constraints>
</view>
</subviews>
</stackView>
<button opaque="NO" contentMode="scaleToFill" semanticContentAttribute="forceRightToLeft" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="BHJ-nE-nba">
<rect key="frame" x="20" y="597" width="335" height="50"/>
<color key="backgroundColor" red="0.98330646749999995" green="0.60748988390000003" blue="0.17982774970000001" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="50" id="J8Z-ab-Jvx"/>
</constraints>
<inset key="imageEdgeInsets" minX="10" minY="5" maxX="0.0" maxY="0.0"/>
<state key="normal" title="Let's go" image="Next.png">
<color key="titleColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="cornerRadius">
<integer key="value" value="25"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="onStart:" destination="-1" eventType="touchUpInside" id="n3Y-GK-qN4"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="BHJ-nE-nba" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" constant="20" id="3am-m6-fV6"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="BHJ-nE-nba" secondAttribute="trailing" constant="20" id="65h-CG-XxQ"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="OPE-9D-Ejt" secondAttribute="trailing" id="9WW-fL-VbV"/>
<constraint firstItem="DGc-C0-sHE" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" id="AKh-9i-Jhj"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="bottom" secondItem="BHJ-nE-nba" secondAttribute="bottom" constant="20" id="Ajy-ps-Jf6"/>
<constraint firstItem="BHJ-nE-nba" firstAttribute="top" secondItem="DGc-C0-sHE" secondAttribute="bottom" constant="30" id="Akf-dQ-xV0"/>
<constraint firstItem="fnl-2z-Ty3" firstAttribute="trailing" secondItem="DGc-C0-sHE" secondAttribute="trailing" id="HnH-mx-GHp"/>
<constraint firstItem="OPE-9D-Ejt" firstAttribute="leading" secondItem="fnl-2z-Ty3" secondAttribute="leading" id="On0-uh-zXl"/>
<constraint firstItem="DGc-C0-sHE" firstAttribute="top" secondItem="OPE-9D-Ejt" secondAttribute="bottom" constant="30" id="ijg-Kl-jEF"/>
<constraint firstItem="OPE-9D-Ejt" firstAttribute="top" secondItem="fnl-2z-Ty3" secondAttribute="top" id="yWy-NL-Ezq"/>
</constraints>
<viewLayoutGuide key="safeArea" id="fnl-2z-Ty3"/>
<point key="canvasLocation" x="130.40000000000001" y="123.68815592203899"/>
</view>
</objects>
<resources>
<image name="Back.png" width="31.5" height="53.5"/>
<image name="Brighness.png" width="525.5" height="750.5"/>
<image name="Glasses.png" width="525.5" height="750.5"/>
<image name="Hat.png" width="525.5" height="750.5"/>
<image name="Holdphone.png" width="922.5" height="734"/>
<image name="Next.png" width="30" height="15"/>
</resources>
</document>
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
import UIKit
import Accelerate
extension CVPixelBuffer {
/**
Returns thumbnail by cropping pixel buffer to biggest square and scaling the cropped image to
model dimensions.
*/
func centerThumbnail(ofSize size: CGSize ) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
let thumbnailSize = min(imageWidth, imageHeight)
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
var originX = 0
var originY = 0
if imageWidth > imageHeight {
originX = (imageWidth - imageHeight) / 2
}
else {
originY = (imageHeight - imageWidth) / 2
}
// Finds the biggest square in the pixel buffer and advances rows based on it.
guard let inputBaseAddress = CVPixelBufferGetBaseAddress(self)?.advanced(
by: originY * inputImageRowBytes + originX * imageChannels) else {
return nil
}
// Gets vImage Buffer from input image
var inputVImageBuffer = vImage_Buffer(
data: inputBaseAddress, height: UInt(thumbnailSize), width: UInt(thumbnailSize),
rowBytes: inputImageRowBytes)
let thumbnailRowBytes = Int(size.width) * imageChannels
guard let thumbnailBytes = malloc(Int(size.height) * thumbnailRowBytes) else {
return nil
}
// Allocates a vImage buffer for thumbnail image.
var thumbnailVImageBuffer = vImage_Buffer(data: thumbnailBytes, height: UInt(size.height), width: UInt(size.width), rowBytes: thumbnailRowBytes)
// Performs the scale operation on input image buffer and stores it in thumbnail image buffer.
let scaleError = vImageScale_ARGB8888(&inputVImageBuffer, &thumbnailVImageBuffer, nil, vImage_Flags(0))
CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
guard scaleError == kvImageNoError else {
free(thumbnailBytes) // avoid leaking the thumbnail buffer when scaling fails
return nil
}
let releaseCallBack: CVPixelBufferReleaseBytesCallback = {mutablePointer, pointer in
if let pointer = pointer {
free(UnsafeMutableRawPointer(mutating: pointer))
}
}
var thumbnailPixelBuffer: CVPixelBuffer?
// Converts the thumbnail vImage buffer to CVPixelBuffer
let conversionStatus = CVPixelBufferCreateWithBytes(
nil, Int(size.width), Int(size.height), pixelBufferType, thumbnailBytes,
thumbnailRowBytes, releaseCallBack, nil, nil, &thumbnailPixelBuffer)
guard conversionStatus == kCVReturnSuccess else {
free(thumbnailBytes)
return nil
}
return thumbnailPixelBuffer
}
static func buffer(from image: UIImage) -> CVPixelBuffer? {
let attrs = [
kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue
] as CFDictionary
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault,
Int(image.size.width),
Int(image.size.height),
kCVPixelFormatType_32BGRA,
attrs,
&pixelBuffer)
guard let buffer = pixelBuffer, status == kCVReturnSuccess else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
defer { CVPixelBufferUnlockBaseAddress(buffer, []) }
let pixelData = CVPixelBufferGetBaseAddress(buffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
guard let context = CGContext(data: pixelData,
width: Int(image.size.width),
height: Int(image.size.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.noneSkipLast.rawValue) else {
return nil
}
context.translateBy(x: 0, y: image.size.height)
context.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context)
image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
UIGraphicsPopContext()
return pixelBuffer
}
}
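A hedged usage sketch of the two helpers above; the 224x224 input size is an illustrative assumption, not a value taken from the bundled models.
if let image = UIImage(named: "sample"),
   let buffer = CVPixelBuffer.buffer(from: image),
   let thumbnail = buffer.centerThumbnail(ofSize: CGSize(width: 224, height: 224)) {
    // `thumbnail` is a square 32BGRA buffer suitable for model inference
    print(CVPixelBufferGetWidth(thumbnail), CVPixelBufferGetHeight(thumbnail))
}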
//
// ExtString.swift
// OCR-SDK
//
// Created by itsol on 5/26/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
extension String {
func localized() -> String {
//Fall back to the key itself when the SDK bundle or language table is missing
guard let bundle = Bundle(identifier: "itsol.OCR-SDK"),
let basePath = bundle.path(forResource: Global.language, ofType: "lproj"),
let languageBundle = Bundle(path: basePath) else {
return self
}
return languageBundle.localizedString(forKey: self, value: "", table: nil)
}
}
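Usage sketch, assuming the SDK bundle and the .lproj tables shipped with it:
Global.language = "vi"
let title = "Back".localized()   // "Trở về" per the bundled Vietnamese table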
//
// ExtUIColor.swift
// OCR-SDK
//
// Created by itsol on 6/10/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIColor {
class func colorFromHexa(_ hex: String, alpha: CGFloat = 1.0) -> UIColor {
var cString: String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if cString.hasPrefix("#") {
cString.remove(at: cString.startIndex)
}
if (cString.count) != 6 {
return UIColor.gray
}
var rgbValue: UInt32 = 0
Scanner(string: cString).scanHexInt32(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: alpha
)
}
}
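Usage sketch; the inputs below exercise the three paths (leading "#", bare hex, malformed string):
let next = UIColor.colorFromHexa(Global.colorButtonNext)      // "#225F8D"
let back = UIColor.colorFromHexa("DBDBDD", alpha: 0.5)
let fallback = UIColor.colorFromHexa("not-a-hex")             // returns UIColor.gray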
//
// ExtUiViewController.swift
// OCR-SDK
//
// Created by itsol on 5/18/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIViewController {
//Show a toast notification
//input: message: String
func showToast(message : String) {
let toastLabel = UILabel(frame: CGRect(x: self.view.frame.size.width/2 - 75, y: self.view.frame.size.height-100, width: 150, height: 35))
toastLabel.backgroundColor = UIColor.black.withAlphaComponent(0.6)
toastLabel.textColor = UIColor.white
toastLabel.textAlignment = .center
toastLabel.font = UIFont(name: "Montserrat-Light", size: 12.0)
toastLabel.text = message
toastLabel.alpha = 1.0
toastLabel.layer.cornerRadius = 10
toastLabel.clipsToBounds = true
self.view.addSubview(toastLabel)
UIView.animate(withDuration: 4.0, delay: 0.1, options: .curveEaseOut, animations: {
toastLabel.alpha = 0.0
}, completion: {(isCompleted) in
toastLabel.removeFromSuperview()
})
}
//Validate the image after capture (currently a stub that always passes)
//input: image: Data
//output: Bool
func validateImage(image: Data) -> Bool {
return true
}
}
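Usage sketch, callable from any view controller in the SDK:
showToast(message: "Invalid image, please check again!".localized())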
//
// Global.swift
// OCR-SDK
//
// Created by itsol on 5/27/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
struct Global {
static var language: String = "en"//"vi"
static var url_font: String = "http://sdk.sb.gotai.ml/process_card_front" //card-front endpoint ("font" is a legacy misspelling of "front")
static var url_back: String = "http://sdk.sb.gotai.ml/process_card_back"
static var url_face: String = "http://sdk.sb.gotai.ml/call/predict_front_back_face"
static var header: [String: Any] = [:]
static var typeFace: String = "RECORD" //TAKEPHOTO
static var ratioPass: Float = 70.0
static var colorConfig: String = "#FFFFFF"
static var colorButtonNext: String = "#225F8D"
static var colorButtonBack: String = "#DBDBDD"
static var colorTextButtonNext: String = "#FFFFFF"
static var colorTextButtonBack: String = "#225F8D"
static var colorTextPreview: String = "#000000"
static var frontConfig: String = ""
static var copyright: String = "Copyright by ITSOL"
static var validateCardFront: Bool = false
static var step: String = "ALL"
static var colorButtonTutorial: String = "#2C3D51"
static var useFileCer: Bool = true
static var imageCard1: Data?
static var imageCard2: Data?
}
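A configuration sketch with hypothetical values: host apps are expected to override these statics before starting the capture flow.
Global.language = "vi"
Global.header = ["Authorization": "Bearer <token>"]   // hypothetical auth header
Global.typeFace = "TAKEPHOTO"                         // instead of the default "RECORD"
Global.useFileCer = false                             // skip certificate pinning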
//
// Loadding.swift
// OCR-SDK
//
// Created by itsol on 5/18/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
class Loading {
static let shared = Loading()
var loadingView = UIView()
var activityIndicator = UIActivityIndicatorView()
func showLoading(viewMain: UIViewController) {
DispatchQueue.main.async {
self.loadingView = UIView(frame: viewMain.view.frame)
self.loadingView.tag = 999999
self.loadingView.backgroundColor = UIColor.gray.withAlphaComponent(0.5)
self.activityIndicator.style = .white
self.activityIndicator.center = self.loadingView.center
self.activityIndicator.startAnimating()
self.loadingView.addSubview(self.activityIndicator)
viewMain.view.addSubview(self.loadingView)
}
}
func hideLoading(viewMain: UIViewController) {
DispatchQueue.main.async {
UIView.animate(withDuration: 0.0, delay: 1.0, options: .curveEaseOut, animations: {
self.loadingView.alpha = 0.0
self.activityIndicator.stopAnimating()
}, completion: { finished in
self.activityIndicator.removeFromSuperview()
self.loadingView.removeFromSuperview()
let removeView = viewMain.view.viewWithTag(999999)
removeView?.removeFromSuperview()
})
}
}
}
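Usage sketch: wrap long-running work between show and hide; `self` stands for a hypothetical presenting view controller.
Loading.shared.showLoading(viewMain: self)
// ... network call or model inference ...
Loading.shared.hideLoading(viewMain: self)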
//
// Validate.swift
// OCR-SDK
//
// Created by itsol on 6/10/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
import UIKit
import Vision
class SBKValidateInput {
static let shared = SBKValidateInput()
var modelDataHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.cardModel)
var modelDataFaceHandler: SBKModelDataHandler? = SBKModelDataHandler(modelFileInfo: MobileNet.modelInfo)
//Stored timestamps so the 1s inference throttle below persists across calls
var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
var previousFaceInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
public typealias CompletionHandle = (_ data: Bool) -> Void
func validateCard(imageInput: UIImage) -> Int {
//Guard the conversions instead of force-unwrapping; treat failure as .ERROR
guard let ciimage = CIImage(image: imageInput),
let cgimage = CIContext(options: nil).createCGImage(ciimage, from: ciimage.extent),
let cvpixelImge = self.convertCGImgeToCVPixelBuffer(forImage: cgimage) else {
return -1
}
let resultValidate = self.didOutput(pixelBuffer: cvpixelImge)
switch resultValidate {
case .ERROR:
return -1
case .IMAGEFAKE:
return 0
case .IMAGEFRONT:
return 1
case .IMAGEBACK:
return 2
}
}
func validateFace(imageFace: CVPixelBuffer, completion: @escaping CompletionHandle) {
self.detectFace(in: imageFace) { [weak self] data in
guard let `self` = self else { return }
let result = data && self.didOutputFace(pixelBuffer: imageFace)
completion(result)
}
}
func didOutput(pixelBuffer: CVPixelBuffer) -> ValidateCard {
let delayBetweenInferencesMs: Double = 1000
let currentTimeMs = Date().timeIntervalSince1970 * 1000
//Throttle: skip inference when the previous run was less than 1s ago
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return .ERROR }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
guard let result = modelDataHandler?.runModel(onFrame: pixelBuffer) else { return .ERROR }
if result[0] > result[1] && result[0] > result[2] {
return .IMAGEFAKE
} else if result[1] > result[0] && result[1] > result[2] {
return .IMAGEFRONT
} else {
return .IMAGEBACK
}
}
func didOutputFace(pixelBuffer: CVPixelBuffer) -> Bool {
let delayBetweenInferencesMs: Double = 1000
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousFaceInferenceTimeMs) >= delayBetweenInferencesMs else { return false }
previousFaceInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
guard let result = modelDataFaceHandler?.runModel(onFrame: pixelBuffer) else { return false }
//The face model scores [notFace, face]; succeed when the face score wins
return result[0] < result[1]
}
func comvertUIImageToCVPixel(imageInput: UIImage) -> CVPixelBuffer {
let ciimage = CIImage(image: imageInput)
let tmpcontext = CIContext(options: nil)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return self.convertCGImgeToCVPixelBuffer(forImage: cgimage!)!
}
func convertCVPixelToUIImage(pixelBuffer: CVPixelBuffer) -> UIImage {
let ciimage : CIImage = CIImage(cvPixelBuffer: pixelBuffer)
let imageView : UIImage = self.convertCIToUIImage(cmage: ciimage)
return imageView
}
func convertCIToUIImage(cmage: CIImage) -> UIImage {
let context:CIContext = CIContext.init(options: nil)
let cgImage:CGImage = context.createCGImage(cmage, from: cmage.extent)!
let image:UIImage = UIImage.init(cgImage: cgImage)
return image
}
func convertCGImgeToCVPixelBuffer (forImage image: CGImage) -> CVPixelBuffer? {
let frameSize = CGSize(width: image.width, height: image.height)
var pixelBuffer:CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(frameSize.width), Int(frameSize.height), kCVPixelFormatType_32BGRA , nil, &pixelBuffer)
if status != kCVReturnSuccess {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags.init(rawValue: 0))
let data = CVPixelBufferGetBaseAddress(pixelBuffer!)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
let context = CGContext(data: data, width: Int(frameSize.width), height: Int(frameSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: bitmapInfo.rawValue)
context?.draw(image, in: CGRect(x: 0, y: 0, width: image.width, height: image.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
func detectFace(in image: CVPixelBuffer, completion: @escaping CompletionHandle) {
if #available(iOS 11.0, *) {
let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
DispatchQueue.main.async {
//Succeed only when at least one face observation is returned
if let results = request.results as? [VNFaceObservation], results.count > 0 {
completion(true)
} else {
completion(false)
}
}
})
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: image, orientation: .leftMirrored, options: [:])
try? imageRequestHandler.perform([faceDetectionRequest])
} else {
// Fallback on earlier versions: Vision is unavailable, report no face
completion(false)
}
}
//Crop the captured image for display
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 10
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 3 / 4), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func cropImageHorizontal(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func cropImageFace(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
let imageCap = image
let widthCrop = imageCap.size.width - imageCap.size.width / 4
UIGraphicsBeginImageContextWithOptions(CGSize(width: widthCrop, height: widthCrop * 4 / 3), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x / scale, y: -rect.origin.y / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
}
enum ValidateCard: String {
case ERROR
case IMAGEFAKE
case IMAGEFRONT
case IMAGEBACK
}
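A sketch mapping validateCard's integer result back to the ValidateCard cases above; `capturedImage` is a hypothetical UIImage.
switch SBKValidateInput.shared.validateCard(imageInput: capturedImage) {
case -1: print("inference throttled or failed (.ERROR)")
case 0:  print("not a genuine card (.IMAGEFAKE)")
case 1:  print("front side detected (.IMAGEFRONT)")
case 2:  print("back side detected (.IMAGEBACK)")
default: break
}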
/*
Localizable.strings
OCR-SDK
Created by itsol on 5/26/20.
Copyright © 2020 itsol. All rights reserved.
*/
"Place it within the box"="Place it within the box";
"Do not place outside"="Do not place outside";
"Avoid glare"="Avoid glare";
"Front of your personal card"="Front of your personal card";
//
// OCRRequest.swift
// SSSSS
//
// Created by itsol on 5/12/20.
// Copyright © 2020 itsol. All rights reserved.
//
import Foundation
class SBOCRRequest: NSObject{
public typealias CompletionHandle = (_ errorMess : String?, _ data: [String: Any]?) -> Void
static let shared = SBOCRRequest()
let certificates: [Data] = {
let bundle = Bundle(identifier: "itsol.OCR-SDK")
let url = bundle!.url(forResource: "objcio", withExtension: "cer")!
let data = try! Data(contentsOf: url)
return [data]
}()
//Call the card-front / card-back API
//input: image
//output: data[String: Any]
func processCardFont(image: Data, pathURL: String, completion: @escaping CompletionHandle){
var request = URLRequest(url: URL(string: pathURL)!)
request.httpMethod = "POST"
if !Global.header.isEmpty {
for (key, value) in Global.header {
request.setValue(value as? String, forHTTPHeaderField: key)
}
}
let boundary = generateBoundaryString()
request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
let imageData = image
request.httpBody = createBodyWithParameters(filePathKey: "image", imageDataKey: imageData as NSData, boundary: boundary) as Data
let urlSession = URLSession(configuration: URLSessionConfiguration.default, delegate: self, delegateQueue: nil)
let task = urlSession.dataTask(with: request, completionHandler: { (data, _, error) -> Void in
var errorMessage = ""
var jsonBody = [String: Any]()
self.processJsonBody(errorMessage: &errorMessage, withJsonBody: &jsonBody, dataResponse: data, errorResponse: error)
guard errorMessage.count == 0 else {
completion(errorMessage,nil)
return
}
completion(nil,jsonBody)
})
task.resume()
}
//Call the ProcessFace API
//input: image, idBack, idFront
//output: data[String: Any]
func processFace(image: Data, pathURL: String, idBack: String, idFront: String, completion: @escaping CompletionHandle){
var params: [String: String] = [
"id_back": idBack,
"id_front": idFront,
]
if idBack == "" {
params.removeValue(forKey: "id_back")
}
let urlPath = URL(string: pathURL)!
var request = URLRequest(url: urlPath)
print("request====", urlPath, "==", request)
request.httpMethod = "POST"
if !Global.header.isEmpty {
for (key, value) in Global.header {
request.setValue(value as? String, forHTTPHeaderField: key)
}
}
let boundary = generateBoundaryString()
request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
let imageData = image
request.httpBody = createBodyWithParameters(parameters: params, filePathKey: "image_general", imageDataKey: imageData as NSData, boundary: boundary) as Data
let urlSession = URLSession(configuration: URLSessionConfiguration.default, delegate: self, delegateQueue: nil)
let task = urlSession.dataTask(with: request, completionHandler: { (data, respone, error) -> Void in
var errorMessage = ""
var jsonBody = [String: Any]()
self.processJsonBody(errorMessage: &errorMessage, withJsonBody: &jsonBody, dataResponse: data, errorResponse: error)
guard errorMessage.count == 0 else {
completion(errorMessage,nil)
return
}
completion(nil,jsonBody)
})
task.resume()
}
func createBodyWithParameters(parameters: [String: String] = [:], filePathKey: String?, imageDataKey: NSData, boundary: String) -> NSData {
let body = NSMutableData()
for (key, value) in parameters {
body.appendString(string: "--\(boundary)\r\n")
body.appendString(string: "Content-Disposition: form-data; name=\"\(key)\"\r\n\r\n")
body.appendString(string: "\(value)\r\n")
}
let face = "face.jpg"
let card1 = "cardFront.jpg"
let card2 = "cardBack.jpg"
let filePathKeyCard1 = "image_card1"
let filePathKeyCard2 = "image_card2"
let mimetype = "image/jpg"
body.appendString(string: "--\(boundary)\r\n")
body.appendString(string: "Content-Disposition: form-data; name=\"\(filePathKey!)\"; filename=\"\(face)\"\r\n")
body.appendString(string: "Content-Type: \(mimetype)\r\n\r\n")
body.append(imageDataKey as Data)
body.appendString(string: "\r\n")
if Global.imageCard1 != nil {
body.appendString(string: "--\(boundary)\r\n")
body.appendString(string: "Content-Disposition: form-data; name=\"\(filePathKeyCard1)\"; filename=\"\(card1)\"\r\n")
body.appendString(string: "Content-Type: \(mimetype)\r\n\r\n")
body.append(Global.imageCard1!)
body.appendString(string: "\r\n")
}
if Global.imageCard2 != nil {
body.appendString(string: "--\(boundary)\r\n")
body.appendString(string: "Content-Disposition: form-data; name=\"\(filePathKeyCard2)\"; filename=\"\(card2)\"\r\n")
body.appendString(string: "Content-Type: \(mimetype)\r\n\r\n")
body.append(Global.imageCard2!)
body.appendString(string: "\r\n")
}
body.appendString(string: "--\(boundary)--\r\n")
return body
}
func generateBoundaryString() -> String {
return "Boundary-\(NSUUID().uuidString)"
}
private func processJsonBody (errorMessage errorMess: inout String,
withJsonBody jsonResult: inout [String: Any],
dataResponse data: Data?,
errorResponse error: Error? ) {
guard error == nil else {
errorMess = "\((error! as NSError).code)"
return
}
guard data != nil else {
errorMess = "Error: Did not receive data"
return
}
//let str = String(decoding: data!, as: UTF8.self)
guard let _jsonResult = try? JSONSerialization.jsonObject(with: data!, options: JSONSerialization.ReadingOptions.mutableContainers) as? [String: Any] else {
errorMess = "Error: data can not convert to jsonObject"
return
}
jsonResult = _jsonResult
}
}
extension SBOCRRequest: URLSessionDelegate {
func urlSession(_ session: URLSession, didReceive challenge: URLAuthenticationChallenge, completionHandler: @escaping (URLSession.AuthChallengeDisposition, URLCredential?) -> Void) {
print("urlsession====", Global.useFileCer)
if let trust = challenge.protectionSpace.serverTrust, SecTrustGetCertificateCount(trust) > 0, Global.useFileCer {
if let certificate = SecTrustGetCertificateAtIndex(trust, 0) {
let data = SecCertificateCopyData(certificate) as Data
if certificates.contains(data) {
completionHandler(.useCredential, URLCredential(trust: trust))
return
}
}
}
completionHandler(.cancelAuthenticationChallenge, nil)
}
}
extension NSMutableData {
func appendString(string: String) {
let data = string.data(using: String.Encoding.utf8, allowLossyConversion: true)
append(data!)
}
}
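A usage sketch for the upload API above; `jpegData` is a hypothetical captured JPEG.
SBOCRRequest.shared.processCardFont(image: jpegData, pathURL: Global.url_font) { errorMess, json in
    if let errorMess = errorMess {
        print("OCR request failed:", errorMess)
    } else {
        print("OCR response:", json ?? [:])
    }
}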
/*
Localizable.strings
OCR-SDK
Created by itsol on 5/26/20.
Copyright © 2020 itsol. All rights reserved.
*/
"Place it within the box"="Đặt đúng vị trí";
"Do not place outside"="Không đặt bên ngoài";
"Avoid glare"="Tránh ánh sáng chói";
"Position your face in the oval" = "Đặt vị trí mặt bạn vào hình";
"Front of your personal card" = "Mặt trước thẻ cá nhân của bạn";
"Invalid image, please check again!" = "Hình ảnh không hợp lệ, vui lòng kiểm tra lại!";
"Make sure your selfie clearly shows your face" = "Hãy chắc chắn rằng ảnh của bạn hiển thị rõ ràng khuôn mặt của bạn";
"Confirm my selfie" = "Tôi thấy ổn rồi";
"Take a new selfie" = "Chụp lại ảnh mới";
"Back of your personal card" = "Mặt sau thẻ cá nhân của bạn";
"Make sure your license details are clear to read, with no blur or glare" = "Đảm bảo chi tiết giấy phép của bạn rõ ràng để đọc, không bị mờ hoặc lóa";
"Take a new picture" = "Chụp lại ảnh mới";
"My license is readable" = "Tôi thấy ổn rồi";
"Proceed to Capture ID" = "Bắt đầu chụp thẻ";
"Good lighting on your face" = "Ánh sáng tốt trên khuôn mặt của bạn";
"Hold phone in front of you" = "Giữ điện thoại trước mặt bạn";
"Bright Light" = "ánh sáng";
"No Hat" = "Không đội mũ";
"No Glasses" = "Không đeo kính";
"Proceed to Take Selfie" = "Bắt đầu chụp ảnh";
"Incorrect face, please check!" = "Khuôn mặt không chính xác, xin vui lòng kiểm tra!";
"Are you ready. Let's start!" = "Bạn đã sẵn sàng. Hãy bắt đầu!";
"Please look straight" = "Vui lòng nhìn thẳng";
"Please turn to the left" = "Quay sang trái";
"Please turn to the right" = "Quay sang phải";
"Unsatisfactory, please try again!" = "Chưa đạt yêu cầu, hãy thử lại!";
"Back" = "Trở về";
"Incorrect card, please check!" = "Thẻ không chính xác, xin vui lòng kiểm tra!";
"Please put the back of the card in" = "Vui lòng đặt mặt sau của thẻ vào";
"Please put the front of the card in" = "Vui lòng đặt mặt trước của thẻ vào";
"Exactly" = "Chính xác";
"There are many faces in the frame" = "Có nhiều khuôn mặt trong khung hình";
# Uncomment the next line to define a global platform for your project
#platform :ios, '10.0'
target 'OCR-SDK' do
# Comment the next line if you don't want to use dynamic frameworks
#use_modular_headers!
use_frameworks!
# Pods for OCR-SDK
pod 'TensorFlowLiteSwift'
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
PODS:
- TensorFlowLiteC (2.2.0)
- TensorFlowLiteSwift (2.2.0):
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
- TensorFlowLiteSwift
SPEC REPOS:
trunk:
- TensorFlowLiteC
- TensorFlowLiteSwift
SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
PODFILE CHECKSUM: 8c4b84d229daab67aa0a162f14470a2461899c16
COCOAPODS: 1.9.1
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 51;
objects = {
/* Begin PBXAggregateTarget section */
AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */ = {
isa = PBXAggregateTarget;
buildConfigurationList = 243169D7A56C0468A9C10463619D018A /* Build configuration list for PBXAggregateTarget "TensorFlowLiteC" */;
buildPhases = (
);
dependencies = (
);
name = TensorFlowLiteC;
};
/* End PBXAggregateTarget section */
/* Begin PBXBuildFile section */
0E993296CF79F75FB6EA210A5AC531FD /* MetalDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1AA18C519BBA17643801F0059C8ABD2F /* MetalDelegate.swift */; };
30806718C80ACFD657320ADD85869F3A /* QuantizationParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = 85E54EA83E3D741A1C97008B191DEDCB /* QuantizationParameters.swift */; };
405DB926AB8462D8F5F96C5576BD24F1 /* TensorFlowLite.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5749A71B8B9BE3A1796E2F4C7F81F2B /* TensorFlowLite.swift */; };
5355876923682A25B8B38A471BEB1E37 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3212113385A8FBBDB272BD23C409FF61 /* Foundation.framework */; };
572E5CE9AFA489B283A8046B5F4C5085 /* Pods-OCR-SDK-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
89AA3C20F56E771C5934B2DCAA379CC4 /* Interpreter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B9F14F8FDFC382CFC76EF3DF9AE89174 /* Interpreter.swift */; };
8DA7D5FE089801A38D7226BB422A3AE9 /* TensorFlowLiteSwift-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 26C4BC578385EDC42291BE039C542771 /* TensorFlowLiteSwift-dummy.m */; };
9966F63F2570886C63A9B4576A355A95 /* Pods-OCR-SDK-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */; };
B30DFF9007000B08CA3613C616943724 /* Delegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 68D196D2D02DB0243857616CFBC828D0 /* Delegate.swift */; };
B4CDABD81CAD2F2287406F58F12242B0 /* TensorFlowLiteSwift-umbrella.h in Headers */ = {isa = PBXBuildFile; fileRef = FD879A3B7FBF0BD96966E3C9AB12B97F /* TensorFlowLiteSwift-umbrella.h */; settings = {ATTRIBUTES = (Public, ); }; };
B9BD81110588FF8EF267BD6647951BD4 /* Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 89DEEE5839289BAF4A77F053163A252B /* Model.swift */; };
BB252E9CA67FCAAD352F777576F14A23 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3212113385A8FBBDB272BD23C409FF61 /* Foundation.framework */; };
E677E11481A8D892C303DCE89B807BA6 /* Tensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62B0658433A1DA500B01381C2A29CA55 /* Tensor.swift */; };
EE7899063120392B7E2BECDA0C8D3086 /* InterpreterError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6AF18535EFDC612966854044FD570764 /* InterpreterError.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
22D64D58AC950A742268FDB71854F742 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
proxyType = 1;
remoteGlobalIDString = 10418167F619D6DA72BADAD10F9EC02B;
remoteInfo = TensorFlowLiteSwift;
};
2BE2C1AFBCE2BDF286D202C2A3861184 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
proxyType = 1;
remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
remoteInfo = TensorFlowLiteC;
};
9CD29C43799CE27402701E8FDBF5CACF /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = BFDFE7DC352907FC980B868725387E98 /* Project object */;
proxyType = 1;
remoteGlobalIDString = AC559E53E13B6FBEF4F5CC310A73AFE6;
remoteInfo = TensorFlowLiteC;
};
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
195023F1232F59DC7609EC8B27ABF4CC /* TensorFlowLiteC.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.debug.xcconfig; sourceTree = "<group>"; };
1AA18C519BBA17643801F0059C8ABD2F /* MetalDelegate.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = MetalDelegate.swift; path = tensorflow/lite/experimental/swift/Sources/MetalDelegate.swift; sourceTree = "<group>"; };
26C4BC578385EDC42291BE039C542771 /* TensorFlowLiteSwift-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "TensorFlowLiteSwift-dummy.m"; sourceTree = "<group>"; };
319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "Pods-OCR-SDK-dummy.m"; sourceTree = "<group>"; };
3212113385A8FBBDB272BD23C409FF61 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS12.2.sdk/System/Library/Frameworks/Foundation.framework; sourceTree = DEVELOPER_DIR; };
4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.release.xcconfig; sourceTree = "<group>"; };
5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = Pods_OCR_SDK.framework; path = "Pods-OCR-SDK.framework"; sourceTree = BUILT_PRODUCTS_DIR; };
62B0658433A1DA500B01381C2A29CA55 /* Tensor.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Tensor.swift; path = tensorflow/lite/experimental/swift/Sources/Tensor.swift; sourceTree = "<group>"; };
68D196D2D02DB0243857616CFBC828D0 /* Delegate.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Delegate.swift; path = tensorflow/lite/experimental/swift/Sources/Delegate.swift; sourceTree = "<group>"; };
6AF18535EFDC612966854044FD570764 /* InterpreterError.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = InterpreterError.swift; path = tensorflow/lite/experimental/swift/Sources/InterpreterError.swift; sourceTree = "<group>"; };
7DC911E4735E7FF695F58024B3BA19FC /* TensorFlowLiteC.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteC.release.xcconfig; sourceTree = "<group>"; };
85E54EA83E3D741A1C97008B191DEDCB /* QuantizationParameters.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = QuantizationParameters.swift; path = tensorflow/lite/experimental/swift/Sources/QuantizationParameters.swift; sourceTree = "<group>"; };
89DEEE5839289BAF4A77F053163A252B /* Model.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Model.swift; path = tensorflow/lite/experimental/swift/Sources/Model.swift; sourceTree = "<group>"; };
9D940727FF8FB9C785EB98E56350EF41 /* Podfile */ = {isa = PBXFileReference; explicitFileType = text.script.ruby; includeInIndex = 1; indentWidth = 2; lastKnownFileType = text; name = Podfile; path = ../Podfile; sourceTree = SOURCE_ROOT; tabWidth = 2; xcLanguageSpecificationIdentifier = xcode.lang.ruby; };
A5060FDF49BFDF183010E92441345498 /* TensorFlowLiteC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = TensorFlowLiteC.framework; path = Frameworks/TensorFlowLiteC.framework; sourceTree = "<group>"; };
A5749A71B8B9BE3A1796E2F4C7F81F2B /* TensorFlowLite.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = TensorFlowLite.swift; path = tensorflow/lite/experimental/swift/Sources/TensorFlowLite.swift; sourceTree = "<group>"; };
AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-OCR-SDK.debug.xcconfig"; sourceTree = "<group>"; };
B303EFFF1FC874FA6CED46B45A292FB8 /* Pods-OCR-SDK-acknowledgements.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "Pods-OCR-SDK-acknowledgements.plist"; sourceTree = "<group>"; };
B9F14F8FDFC382CFC76EF3DF9AE89174 /* Interpreter.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; name = Interpreter.swift; path = tensorflow/lite/experimental/swift/Sources/Interpreter.swift; sourceTree = "<group>"; };
BE006A1273F8AB1A760841C70B8ECC17 /* TensorFlowLiteSwift-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-prefix.pch"; sourceTree = "<group>"; };
CD35CA67D5609CA37E90AB1EB079DA0E /* Pods-OCR-SDK-acknowledgements.markdown */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = "Pods-OCR-SDK-acknowledgements.markdown"; sourceTree = "<group>"; };
CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TensorFlowLiteSwift.debug.xcconfig; sourceTree = "<group>"; };
DADD9C83FCF7E2F0C4969D06D0E88B94 /* TensorFlowLiteSwift.modulemap */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.module; path = TensorFlowLiteSwift.modulemap; sourceTree = "<group>"; };
E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; name = TensorFlowLite.framework; path = TensorFlowLiteSwift.framework; sourceTree = BUILT_PRODUCTS_DIR; };
E4EEE4038E7A669620B485A8C87E940B /* TensorFlowLiteSwift-Info.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "TensorFlowLiteSwift-Info.plist"; sourceTree = "<group>"; };
E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-OCR-SDK.release.xcconfig"; sourceTree = "<group>"; };
F3B636B674AD0C3233D0DBB52FB64E70 /* Pods-OCR-SDK.modulemap */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.module; path = "Pods-OCR-SDK.modulemap"; sourceTree = "<group>"; };
F8FF339CAAA73550A8B6730FB164563D /* Pods-OCR-SDK-Info.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "Pods-OCR-SDK-Info.plist"; sourceTree = "<group>"; };
FD879A3B7FBF0BD96966E3C9AB12B97F /* TensorFlowLiteSwift-umbrella.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TensorFlowLiteSwift-umbrella.h"; sourceTree = "<group>"; };
FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "Pods-OCR-SDK-umbrella.h"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
54793BD37B24B667FDD324D8068F5119 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
5355876923682A25B8B38A471BEB1E37 /* Foundation.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
C6CF52B5378499A6CEB8CFEB84955DDE /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
BB252E9CA67FCAAD352F777576F14A23 /* Foundation.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
17167E5FF53D40FA62C5145F1B07C18F /* Pods-OCR-SDK */ = {
isa = PBXGroup;
children = (
F3B636B674AD0C3233D0DBB52FB64E70 /* Pods-OCR-SDK.modulemap */,
CD35CA67D5609CA37E90AB1EB079DA0E /* Pods-OCR-SDK-acknowledgements.markdown */,
B303EFFF1FC874FA6CED46B45A292FB8 /* Pods-OCR-SDK-acknowledgements.plist */,
319B0ACCE7ECB421BFA606DFCBD0F6CA /* Pods-OCR-SDK-dummy.m */,
F8FF339CAAA73550A8B6730FB164563D /* Pods-OCR-SDK-Info.plist */,
FFF9238C69E5DF755D1644DCB1F71162 /* Pods-OCR-SDK-umbrella.h */,
AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */,
E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */,
);
name = "Pods-OCR-SDK";
path = "Target Support Files/Pods-OCR-SDK";
sourceTree = "<group>";
};
1CAE4DFA30976CB39FF45A59917E2812 /* Pods */ = {
isa = PBXGroup;
children = (
6661CAE509B528212186DCE3A5F21F03 /* TensorFlowLiteC */,
A60D71B0C504F0801CA7AF415D647168 /* TensorFlowLiteSwift */,
);
name = Pods;
sourceTree = "<group>";
};
5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */ = {
isa = PBXGroup;
children = (
5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */,
E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */,
);
name = Products;
sourceTree = "<group>";
};
56FDD2F0EC6001AEBBCE1A2B224A3117 /* Support Files */ = {
isa = PBXGroup;
children = (
195023F1232F59DC7609EC8B27ABF4CC /* TensorFlowLiteC.debug.xcconfig */,
7DC911E4735E7FF695F58024B3BA19FC /* TensorFlowLiteC.release.xcconfig */,
);
name = "Support Files";
path = "../Target Support Files/TensorFlowLiteC";
sourceTree = "<group>";
};
6661CAE509B528212186DCE3A5F21F03 /* TensorFlowLiteC */ = {
isa = PBXGroup;
children = (
FE52148DC6DE55C0D03EB75D362940B6 /* Frameworks */,
56FDD2F0EC6001AEBBCE1A2B224A3117 /* Support Files */,
);
name = TensorFlowLiteC;
path = TensorFlowLiteC;
sourceTree = "<group>";
};
91D362ED26DBFCB0F7D03D165E0B9F45 /* Support Files */ = {
isa = PBXGroup;
children = (
DADD9C83FCF7E2F0C4969D06D0E88B94 /* TensorFlowLiteSwift.modulemap */,
26C4BC578385EDC42291BE039C542771 /* TensorFlowLiteSwift-dummy.m */,
E4EEE4038E7A669620B485A8C87E940B /* TensorFlowLiteSwift-Info.plist */,
BE006A1273F8AB1A760841C70B8ECC17 /* TensorFlowLiteSwift-prefix.pch */,
FD879A3B7FBF0BD96966E3C9AB12B97F /* TensorFlowLiteSwift-umbrella.h */,
CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */,
4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */,
);
name = "Support Files";
path = "../Target Support Files/TensorFlowLiteSwift";
sourceTree = "<group>";
};
A60D71B0C504F0801CA7AF415D647168 /* TensorFlowLiteSwift */ = {
isa = PBXGroup;
children = (
68D196D2D02DB0243857616CFBC828D0 /* Delegate.swift */,
B9F14F8FDFC382CFC76EF3DF9AE89174 /* Interpreter.swift */,
6AF18535EFDC612966854044FD570764 /* InterpreterError.swift */,
1AA18C519BBA17643801F0059C8ABD2F /* MetalDelegate.swift */,
89DEEE5839289BAF4A77F053163A252B /* Model.swift */,
85E54EA83E3D741A1C97008B191DEDCB /* QuantizationParameters.swift */,
62B0658433A1DA500B01381C2A29CA55 /* Tensor.swift */,
A5749A71B8B9BE3A1796E2F4C7F81F2B /* TensorFlowLite.swift */,
91D362ED26DBFCB0F7D03D165E0B9F45 /* Support Files */,
);
name = TensorFlowLiteSwift;
path = TensorFlowLiteSwift;
sourceTree = "<group>";
};
C0834CEBB1379A84116EF29F93051C60 /* iOS */ = {
isa = PBXGroup;
children = (
3212113385A8FBBDB272BD23C409FF61 /* Foundation.framework */,
);
name = iOS;
sourceTree = "<group>";
};
C19DCC7894F44B18AC35116DD3CDECDA /* Targets Support Files */ = {
isa = PBXGroup;
children = (
17167E5FF53D40FA62C5145F1B07C18F /* Pods-OCR-SDK */,
);
name = "Targets Support Files";
sourceTree = "<group>";
};
CF1408CF629C7361332E53B88F7BD30C = {
isa = PBXGroup;
children = (
9D940727FF8FB9C785EB98E56350EF41 /* Podfile */,
D210D550F4EA176C3123ED886F8F87F5 /* Frameworks */,
1CAE4DFA30976CB39FF45A59917E2812 /* Pods */,
5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */,
C19DCC7894F44B18AC35116DD3CDECDA /* Targets Support Files */,
);
sourceTree = "<group>";
};
D210D550F4EA176C3123ED886F8F87F5 /* Frameworks */ = {
isa = PBXGroup;
children = (
C0834CEBB1379A84116EF29F93051C60 /* iOS */,
);
name = Frameworks;
sourceTree = "<group>";
};
FE52148DC6DE55C0D03EB75D362940B6 /* Frameworks */ = {
isa = PBXGroup;
children = (
A5060FDF49BFDF183010E92441345498 /* TensorFlowLiteC.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
2A1AD35091B61712ED5BA004BABD19C5 /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
572E5CE9AFA489B283A8046B5F4C5085 /* Pods-OCR-SDK-umbrella.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
78208D4284DA5460EE036B234B831B9D /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
B4CDABD81CAD2F2287406F58F12242B0 /* TensorFlowLiteSwift-umbrella.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */
10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */ = {
isa = PBXNativeTarget;
buildConfigurationList = 8BEE4D4068BDD06E26CA520708ABB5DF /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */;
buildPhases = (
78208D4284DA5460EE036B234B831B9D /* Headers */,
B5E4027CF6A0738C877D048A2F15F3DC /* Sources */,
C6CF52B5378499A6CEB8CFEB84955DDE /* Frameworks */,
679E7170D73F96A9F6F4AF34B095DD4A /* Resources */,
);
buildRules = (
);
dependencies = (
9C7C0C7FC1E8F36E403A9DA58D08A654 /* PBXTargetDependency */,
);
name = TensorFlowLiteSwift;
productName = TensorFlowLiteSwift;
productReference = E0B71FE13AA6B05010059EB4B8D87919 /* TensorFlowLite.framework */;
productType = "com.apple.product-type.framework";
};
DE1F4D51AD94C30627575AEE202FD099 /* Pods-OCR-SDK */ = {
isa = PBXNativeTarget;
buildConfigurationList = 21F7D35700D8C328AA9B369387D85320 /* Build configuration list for PBXNativeTarget "Pods-OCR-SDK" */;
buildPhases = (
2A1AD35091B61712ED5BA004BABD19C5 /* Headers */,
65F0B64B43BB6554ED14755D9819F62D /* Sources */,
54793BD37B24B667FDD324D8068F5119 /* Frameworks */,
A2C04265C83435C3D235A35531432359 /* Resources */,
);
buildRules = (
);
dependencies = (
2EA7F772E5D8762DCB4049AD1E86FAAC /* PBXTargetDependency */,
09A95346D3A197044EFD13FCA1770E02 /* PBXTargetDependency */,
);
name = "Pods-OCR-SDK";
productName = "Pods-OCR-SDK";
productReference = 5131EA118CFE71670689AFC2E40810BB /* Pods_OCR_SDK.framework */;
productType = "com.apple.product-type.framework";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
BFDFE7DC352907FC980B868725387E98 /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 1100;
LastUpgradeCheck = 1100;
};
buildConfigurationList = 4821239608C13582E20E6DA73FD5F1F9 /* Build configuration list for PBXProject "Pods" */;
compatibilityVersion = "Xcode 10.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = CF1408CF629C7361332E53B88F7BD30C;
productRefGroup = 5017E107FDF6AB6F0B29C3F523D6EA81 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
DE1F4D51AD94C30627575AEE202FD099 /* Pods-OCR-SDK */,
AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */,
10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
679E7170D73F96A9F6F4AF34B095DD4A /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
A2C04265C83435C3D235A35531432359 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
65F0B64B43BB6554ED14755D9819F62D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
9966F63F2570886C63A9B4576A355A95 /* Pods-OCR-SDK-dummy.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
B5E4027CF6A0738C877D048A2F15F3DC /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
B30DFF9007000B08CA3613C616943724 /* Delegate.swift in Sources */,
89AA3C20F56E771C5934B2DCAA379CC4 /* Interpreter.swift in Sources */,
EE7899063120392B7E2BECDA0C8D3086 /* InterpreterError.swift in Sources */,
0E993296CF79F75FB6EA210A5AC531FD /* MetalDelegate.swift in Sources */,
B9BD81110588FF8EF267BD6647951BD4 /* Model.swift in Sources */,
30806718C80ACFD657320ADD85869F3A /* QuantizationParameters.swift in Sources */,
E677E11481A8D892C303DCE89B807BA6 /* Tensor.swift in Sources */,
405DB926AB8462D8F5F96C5576BD24F1 /* TensorFlowLite.swift in Sources */,
8DA7D5FE089801A38D7226BB422A3AE9 /* TensorFlowLiteSwift-dummy.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
09A95346D3A197044EFD13FCA1770E02 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteSwift;
target = 10418167F619D6DA72BADAD10F9EC02B /* TensorFlowLiteSwift */;
targetProxy = 22D64D58AC950A742268FDB71854F742 /* PBXContainerItemProxy */;
};
2EA7F772E5D8762DCB4049AD1E86FAAC /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteC;
target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
targetProxy = 2BE2C1AFBCE2BDF286D202C2A3861184 /* PBXContainerItemProxy */;
};
9C7C0C7FC1E8F36E403A9DA58D08A654 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
name = TensorFlowLiteC;
target = AC559E53E13B6FBEF4F5CC310A73AFE6 /* TensorFlowLiteC */;
targetProxy = 9CD29C43799CE27402701E8FDBF5CACF /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin XCBuildConfiguration section */
025B8BDA71BF4A287EF237A8C2EB6606 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */;
buildSettings = {
CODE_SIGN_IDENTITY = "";
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Release;
};
196DFA3E4A09A28224918543529A1885 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"POD_CONFIGURATION_DEBUG=1",
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
PRODUCT_NAME = "$(TARGET_NAME)";
STRIP_INSTALLED_PRODUCT = NO;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
SYMROOT = "${SRCROOT}/../build";
};
name = Debug;
};
2E5E1BF7B970413ED45CC6FBB2C37C41 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
CLANG_ENABLE_OBJC_WEAK = NO;
CODE_SIGN_IDENTITY = "";
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Debug;
};
41861568EC8233F1D514C49D051BFD99 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
CLANG_ENABLE_OBJC_WEAK = NO;
CODE_SIGN_IDENTITY = "";
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Release;
};
5C4997AE81106EC6C2D8E284E07D3ADA /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 195023F1232F59DC7609EC8B27ABF4CC /* TensorFlowLiteC.debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
A5054D9203F74D414013169CE2108677 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7DC911E4735E7FF695F58024B3BA19FC /* TensorFlowLiteC.release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
AFB6504F17DBCA07DA44BA4B2D0F587E /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */;
buildSettings = {
CODE_SIGN_IDENTITY = "";
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
CURRENT_PROJECT_VERSION = 1;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
"@loader_path/Frameworks",
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Debug;
};
B01D14FDC83DCF9D4BE53066BEA96D05 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = (
"POD_CONFIGURATION_RELEASE=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
PRODUCT_NAME = "$(TARGET_NAME)";
STRIP_INSTALLED_PRODUCT = NO;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
SWIFT_VERSION = 5.0;
SYMROOT = "${SRCROOT}/../build";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
21F7D35700D8C328AA9B369387D85320 /* Build configuration list for PBXNativeTarget "Pods-OCR-SDK" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2E5E1BF7B970413ED45CC6FBB2C37C41 /* Debug */,
41861568EC8233F1D514C49D051BFD99 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
243169D7A56C0468A9C10463619D018A /* Build configuration list for PBXAggregateTarget "TensorFlowLiteC" */ = {
isa = XCConfigurationList;
buildConfigurations = (
5C4997AE81106EC6C2D8E284E07D3ADA /* Debug */,
A5054D9203F74D414013169CE2108677 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
4821239608C13582E20E6DA73FD5F1F9 /* Build configuration list for PBXProject "Pods" */ = {
isa = XCConfigurationList;
buildConfigurations = (
196DFA3E4A09A28224918543529A1885 /* Debug */,
B01D14FDC83DCF9D4BE53066BEA96D05 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
8BEE4D4068BDD06E26CA520708ABB5DF /* Build configuration list for PBXNativeTarget "TensorFlowLiteSwift" */ = {
isa = XCConfigurationList;
buildConfigurations = (
AFB6504F17DBCA07DA44BA4B2D0F587E /* Debug */,
025B8BDA71BF4A287EF237A8C2EB6606 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = BFDFE7DC352907FC980B868725387E98 /* Project object */;
}
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DE1F4D51AD94C30627575AEE202FD099"
BuildableName = "Pods_OCR_SDK.framework"
BlueprintName = "Pods-OCR-SDK"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForAnalyzing = "YES"
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
buildConfiguration = "Debug"
allowLocationSimulation = "YES">
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "10418167F619D6DA72BADAD10F9EC02B"
BuildableName = "TensorFlowLite.framework"
BlueprintName = "TensorFlowLiteSwift"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${CURRENT_PROJECT_VERSION}</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
# Acknowledgements
This application makes use of the following third party libraries:
## TensorFlowLiteSwift
Copyright 2019 The TensorFlow Authors. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Generated by CocoaPods - https://cocoapods.org
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreferenceSpecifiers</key>
<array>
<dict>
<key>FooterText</key>
<string>This application makes use of the following third party libraries:</string>
<key>Title</key>
<string>Acknowledgements</string>
<key>Type</key>
<string>PSGroupSpecifier</string>
</dict>
<dict>
<key>FooterText</key>
<string>Copyright 2019 The TensorFlow Authors. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</string>
<key>License</key>
<string>Apache</string>
<key>Title</key>
<string>TensorFlowLiteSwift</string>
<key>Type</key>
<string>PSGroupSpecifier</string>
</dict>
<dict>
<key>FooterText</key>
<string>Generated by CocoaPods - https://cocoapods.org</string>
<key>Title</key>
<string></string>
<key>Type</key>
<string>PSGroupSpecifier</string>
</dict>
</array>
<key>StringsTable</key>
<string>Acknowledgements</string>
<key>Title</key>
<string>Acknowledgements</string>
</dict>
</plist>
#import <Foundation/Foundation.h>
@interface PodsDummy_Pods_OCR_SDK : NSObject
@end
@implementation PodsDummy_Pods_OCR_SDK
@end
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
#!/bin/sh
set -e
set -u
set -o pipefail
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
# If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
# resources to, so exit 0 (signalling the script phase was successful).
exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
case "${TARGETED_DEVICE_FAMILY:-}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
install_resource()
{
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
case $RESOURCE_PATH in
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
*.xcassets)
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
*)
echo "$RESOURCE_PATH" || true
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}"* ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
else
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
fi
fi
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
FOUNDATION_EXPORT double Pods_OCR_SDKVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_OCR_SDKVersionString[];
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
PODS_ROOT = ${SRCROOT}/Pods
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
framework module Pods_OCR_SDK {
umbrella header "Pods-OCR-SDK-umbrella.h"
export *
module * { export * }
}
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift" "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift/TensorFlowLite.framework/Headers"
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -framework "TensorFlowLite" -framework "TensorFlowLiteC"
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
PODS_ROOT = ${SRCROOT}/Pods
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteC
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_LDFLAGS = $(inherited) -l"c++"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/TensorFlowLiteC
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteC
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_LDFLAGS = $(inherited) -l"c++"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/TensorFlowLiteC
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>2.2.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${CURRENT_PROJECT_VERSION}</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
#import <Foundation/Foundation.h>
@interface PodsDummy_TensorFlowLiteSwift : NSObject
@end
@implementation PodsDummy_TensorFlowLiteSwift
@end
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
FOUNDATION_EXPORT double TensorFlowLiteVersionNumber;
FOUNDATION_EXPORT const unsigned char TensorFlowLiteVersionString[];
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/TensorFlowLiteSwift
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
framework module TensorFlowLite {
umbrella header "TensorFlowLiteSwift-umbrella.h"
export *
module * { export * }
}
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/TensorFlowLiteSwift
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/TensorFlowLiteC/Frameworks"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/TensorFlowLiteSwift
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
#import "c_api.h"
#import "common.h"
#import "metal_delegate.h"
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_C_C_API_H_
#define TENSORFLOW_LITE_C_C_API_H_
#include <stdarg.h>
#include <stdint.h>
#include "common.h"
// --------------------------------------------------------------------------
/// C API for TensorFlow Lite.
///
/// The API leans towards simplicity and uniformity instead of convenience, as
/// most usage will be by language-specific wrappers. It provides largely the
/// same set of functionality as that of the C++ TensorFlow Lite `Interpreter`
/// API, but is useful for shared libraries where having a stable ABI boundary
/// is important.
///
/// Conventions:
/// * We use the prefix TfLite for everything in the API.
/// * size_t is used to represent byte sizes of objects that are
/// materialized in the address space of the calling process.
/// * int is used as an index into arrays.
///
/// Usage:
/// <pre><code>
/// // Create the model and interpreter options.
/// TfLiteModel* model = TfLiteModelCreateFromFile("/path/to/model.tflite");
/// TfLiteInterpreterOptions* options = TfLiteInterpreterOptionsCreate();
/// TfLiteInterpreterOptionsSetNumThreads(options, 2);
///
/// // Create the interpreter.
/// TfLiteInterpreter* interpreter = TfLiteInterpreterCreate(model, options);
///
/// // Allocate tensors and populate the input tensor data.
/// TfLiteInterpreterAllocateTensors(interpreter);
/// TfLiteTensor* input_tensor =
/// TfLiteInterpreterGetInputTensor(interpreter, 0);
/// TfLiteTensorCopyFromBuffer(input_tensor, input.data(),
/// input.size() * sizeof(float));
///
/// // Execute inference.
/// TfLiteInterpreterInvoke(interpreter);
///
/// // Extract the output tensor data.
/// const TfLiteTensor* output_tensor =
///     TfLiteInterpreterGetOutputTensor(interpreter, 0);
/// TfLiteTensorCopyToBuffer(output_tensor, output.data(),
/// output.size() * sizeof(float));
///
/// // Dispose of the model and interpreter objects.
/// TfLiteInterpreterDelete(interpreter);
/// TfLiteInterpreterOptionsDelete(options);
/// TfLiteModelDelete(model);
/// </code></pre>
#ifdef SWIG
#define TFL_CAPI_EXPORT
#else
#if defined(_WIN32)
#ifdef TFL_COMPILE_LIBRARY
#define TFL_CAPI_EXPORT __declspec(dllexport)
#else
#define TFL_CAPI_EXPORT __declspec(dllimport)
#endif // TFL_COMPILE_LIBRARY
#else
#define TFL_CAPI_EXPORT __attribute__((visibility("default")))
#endif // _WIN32
#endif // SWIG
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
// --------------------------------------------------------------------------
// TfLiteVersion returns a string describing version information of the
// TensorFlow Lite library. TensorFlow Lite uses semantic versioning.
TFL_CAPI_EXPORT extern const char* TfLiteVersion(void);
// --------------------------------------------------------------------------
// TfLiteModel wraps a loaded TensorFlow Lite model.
typedef struct TfLiteModel TfLiteModel;
// Returns a model from the provided buffer, or null on failure.
TFL_CAPI_EXPORT extern TfLiteModel* TfLiteModelCreate(const void* model_data,
size_t model_size);
// Returns a model from the provided file, or null on failure.
TFL_CAPI_EXPORT extern TfLiteModel* TfLiteModelCreateFromFile(
const char* model_path);
// Destroys the model instance.
TFL_CAPI_EXPORT extern void TfLiteModelDelete(TfLiteModel* model);
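//
// Example (illustrative sketch; `buffer` and `buffer_size` are hypothetical
// and assumed here to stay valid for the model's lifetime, since the buffer
// contents are not copied):
//
//   TfLiteModel* model = TfLiteModelCreate(buffer, buffer_size);
//   if (model == NULL) { /* handle load failure */ }
//   /* ... build an interpreter, run inference ... */
//   TfLiteModelDelete(model);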
// --------------------------------------------------------------------------
// TfLiteInterpreterOptions allows customized interpreter configuration.
typedef struct TfLiteInterpreterOptions TfLiteInterpreterOptions;
// Returns a new interpreter options instance.
TFL_CAPI_EXPORT extern TfLiteInterpreterOptions*
TfLiteInterpreterOptionsCreate();
// Destroys the interpreter options instance.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsDelete(
TfLiteInterpreterOptions* options);
// Sets the number of CPU threads to use for the interpreter.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsSetNumThreads(
TfLiteInterpreterOptions* options, int32_t num_threads);
// Adds a delegate to be applied during `TfLiteInterpreter` creation.
//
// If delegate application fails, interpreter creation will also fail with an
// associated error logged.
//
// NOTE: The caller retains ownership of the delegate and should ensure that it
// remains valid for the duration of any created interpreter's lifetime.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsAddDelegate(
TfLiteInterpreterOptions* options, TfLiteDelegate* delegate);
// Sets a custom error reporter for interpreter execution.
//
// * `reporter` takes the provided `user_data` object, as well as a C-style
// format string and arg list (see also vprintf).
// * `user_data` is optional. If provided, it is owned by the client and must
// remain valid for the duration of the interpreter lifetime.
TFL_CAPI_EXPORT extern void TfLiteInterpreterOptionsSetErrorReporter(
TfLiteInterpreterOptions* options,
void (*reporter)(void* user_data, const char* format, va_list args),
void* user_data);
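//
// Example (illustrative sketch; `MyReport` is a hypothetical reporter that
// forwards messages to stderr):
//
//   void MyReport(void* user_data, const char* format, va_list args) {
//     (void)user_data;
//     vfprintf(stderr, format, args);
//     fputc('\n', stderr);
//   }
//   ...
//   TfLiteInterpreterOptionsSetErrorReporter(options, MyReport, NULL);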
// --------------------------------------------------------------------------
// TfLiteInterpreter provides inference from a provided model.
typedef struct TfLiteInterpreter TfLiteInterpreter;
// Returns a new interpreter using the provided model and options, or null on
// failure.
//
// * `model` must be a valid model instance. The caller retains ownership of the
// object, and can destroy it immediately after creating the interpreter; the
// interpreter will maintain its own reference to the underlying model data.
// * `optional_options` may be null. The caller retains ownership of the object,
// and can safely destroy it immediately after creating the interpreter.
//
// NOTE: The client *must* explicitly allocate tensors before attempting to
// access input tensor data or invoke the interpreter.
TFL_CAPI_EXPORT extern TfLiteInterpreter* TfLiteInterpreterCreate(
const TfLiteModel* model, const TfLiteInterpreterOptions* optional_options);
// Destroys the interpreter.
TFL_CAPI_EXPORT extern void TfLiteInterpreterDelete(
TfLiteInterpreter* interpreter);
// Returns the number of input tensors associated with the model.
TFL_CAPI_EXPORT extern int32_t TfLiteInterpreterGetInputTensorCount(
const TfLiteInterpreter* interpreter);
// Returns the tensor associated with the input index.
// REQUIRES: 0 <= input_index < TfLiteInterpreterGetInputTensorCount(interpreter)
TFL_CAPI_EXPORT extern TfLiteTensor* TfLiteInterpreterGetInputTensor(
const TfLiteInterpreter* interpreter, int32_t input_index);
// Resizes the specified input tensor.
//
// NOTE: After a resize, the client *must* explicitly allocate tensors before
// attempting to access the resized tensor data or invoke the interpreter.
// REQUIRES: 0 <= input_index < TfLiteInterpreterGetInputTensorCount(interpreter)
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterResizeInputTensor(
TfLiteInterpreter* interpreter, int32_t input_index, const int* input_dims,
int32_t input_dims_size);
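//
// Example (illustrative sketch): resizing input 0 to a 1x224x224x3 shape.
// Tensors must then be (re-)allocated before the next access or invocation.
//
//   const int dims[4] = {1, 224, 224, 3};
//   TfLiteInterpreterResizeInputTensor(interpreter, 0, dims, 4);
//   TfLiteInterpreterAllocateTensors(interpreter);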
// Updates allocations for all tensors, resizing dependent tensors using the
// specified input tensor dimensionality.
//
// This is a relatively expensive operation, and need only be called after
// creating the graph and/or resizing any inputs.
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterAllocateTensors(
TfLiteInterpreter* interpreter);
// Runs inference for the loaded graph.
//
// NOTE: It is possible that the interpreter is not in a ready state to
// evaluate (e.g., if a ResizeInputTensor() has been performed without a call to
// AllocateTensors()).
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteInterpreterInvoke(
TfLiteInterpreter* interpreter);
// Returns the number of output tensors associated with the model.
TFL_CAPI_EXPORT extern int32_t TfLiteInterpreterGetOutputTensorCount(
const TfLiteInterpreter* interpreter);
// Returns the tensor associated with the output index.
// REQUIRES: 0 <= output_index < TfLiteInterpreterGetOutputTensorCount(interpreter)
//
// NOTE: The shape and underlying data buffer for output tensors may not
// be available until after the output tensor has been both sized and allocated.
// In general, best practice is to interact with the output tensor *after*
// calling TfLiteInterpreterInvoke().
TFL_CAPI_EXPORT extern const TfLiteTensor* TfLiteInterpreterGetOutputTensor(
const TfLiteInterpreter* interpreter, int32_t output_index);
// --------------------------------------------------------------------------
// TfLiteTensor wraps data associated with a graph tensor.
//
// Note that, while the TfLiteTensor struct is not currently opaque, and its
// fields can be accessed directly, these methods are still convenient for
// language bindings. In the future the tensor struct will likely be made opaque
// in the public API.
// Returns the type of a tensor element.
TFL_CAPI_EXPORT extern TfLiteType TfLiteTensorType(const TfLiteTensor* tensor);
// Returns the number of dimensions that the tensor has.
TFL_CAPI_EXPORT extern int32_t TfLiteTensorNumDims(const TfLiteTensor* tensor);
// Returns the length of the tensor in the "dim_index" dimension.
// REQUIRES: 0 <= dim_index < TfLiteTensorNumDims(tensor)
TFL_CAPI_EXPORT extern int32_t TfLiteTensorDim(const TfLiteTensor* tensor,
int32_t dim_index);
// Returns the size of the underlying data in bytes.
TFL_CAPI_EXPORT extern size_t TfLiteTensorByteSize(const TfLiteTensor* tensor);
// Returns a pointer to the underlying data buffer.
//
// NOTE: The result may be null if tensors have not yet been allocated, e.g.,
// if the Tensor has just been created or resized and `TfLiteAllocateTensors()`
// has yet to be called, or if the output tensor is dynamically sized and the
// interpreter hasn't been invoked.
TFL_CAPI_EXPORT extern void* TfLiteTensorData(const TfLiteTensor* tensor);
// Returns the (null-terminated) name of the tensor.
TFL_CAPI_EXPORT extern const char* TfLiteTensorName(const TfLiteTensor* tensor);
// Returns the parameters for asymmetric quantization. The quantization
// parameters are only valid when the tensor type is `kTfLiteUInt8` and the
// `scale != 0`. Quantized values can be converted back to float using:
// real_value = scale * (quantized_value - zero_point);
TFL_CAPI_EXPORT extern TfLiteQuantizationParams TfLiteTensorQuantizationParams(
const TfLiteTensor* tensor);
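//
// Example (illustrative sketch): dequantizing the first element of a
// `kTfLiteUInt8` tensor using the formula above.
//
//   TfLiteQuantizationParams qp = TfLiteTensorQuantizationParams(tensor);
//   const uint8_t* data = (const uint8_t*)TfLiteTensorData(tensor);
//   float real_value = qp.scale * (data[0] - qp.zero_point);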
// Copies from the provided input buffer into the tensor's buffer.
// REQUIRES: input_data_size == TfLiteTensorByteSize(tensor)
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteTensorCopyFromBuffer(
TfLiteTensor* tensor, const void* input_data, size_t input_data_size);
// Copies to the provided output buffer from the tensor's buffer.
// REQUIRES: output_data_size == TfLiteTensorByteSize(tensor)
TFL_CAPI_EXPORT extern TfLiteStatus TfLiteTensorCopyToBuffer(
const TfLiteTensor* output_tensor, void* output_data,
size_t output_data_size);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // TENSORFLOW_LITE_C_C_API_H_
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// This file defines common C types and APIs for implementing operations,
// delegates and other constructs in TensorFlow Lite. The actual operations and
// delegates can be defined using C++, but the interface between the interpreter
// and the operations is C.
//
// Summary of abstractions
// TF_LITE_ENSURE - Self-sufficient error checking
// TfLiteStatus - Status reporting
// TfLiteIntArray - stores tensor shapes (dims),
// TfLiteContext - allows an op to access the tensors
// TfLiteTensor - tensor (a multidimensional array)
// TfLiteNode - a single node or operation
// TfLiteRegistration - the implementation of a conceptual operation.
// TfLiteDelegate - allows delegation of nodes to alternative backends.
//
// Some abstractions in this file are created and managed by Interpreter.
#ifndef TENSORFLOW_LITE_C_COMMON_H_
#define TENSORFLOW_LITE_C_COMMON_H_
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
typedef enum TfLiteStatus { kTfLiteOk = 0, kTfLiteError = 1 } TfLiteStatus;
// The list of external context types known to TF Lite. This list exists solely
// to avoid conflicts and to ensure ops can share the external contexts they
// need. Access to the external contexts is controlled by one of the
// corresponding support files.
typedef enum TfLiteExternalContextType {
kTfLiteEigenContext = 0, // include eigen_support.h to use.
kTfLiteGemmLowpContext = 1, // include gemm_support.h to use.
kTfLiteEdgeTpuContext = 2, // Placeholder for Edge TPU support.
kTfLiteCpuBackendContext = 3, // include cpu_backend_support.h to use.
kTfLiteMaxExternalContexts = 4
} TfLiteExternalContextType;
// Forward declare so dependent structs and methods can reference these types
// prior to the struct definitions.
struct TfLiteContext;
struct TfLiteDelegate;
struct TfLiteRegistration;
// An external context is a collection of information unrelated to the TF Lite
// framework, but useful to a subset of the ops. TF Lite knows very little
// about the actual contexts, but it keeps a list of them, and is able to
// refresh them if configurations like the number of recommended threads
// change.
typedef struct TfLiteExternalContext {
TfLiteExternalContextType type;
TfLiteStatus (*Refresh)(struct TfLiteContext* context);
} TfLiteExternalContext;
#define kTfLiteOptionalTensor (-1)
// Fixed size list of integers. Used for dimensions and input/output tensor
// indices.
typedef struct TfLiteIntArray {
int size;
// gcc 6.1+ has a bug where flexible members aren't properly handled
// https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \
__GNUC_MINOR__ >= 1
int data[0];
#else
int data[];
#endif
} TfLiteIntArray;
// Given the size (number of elements) in a TfLiteIntArray, calculate its size
// in bytes.
int TfLiteIntArrayGetSizeInBytes(int size);
#ifndef TF_LITE_STATIC_MEMORY
// Create an array of a given `size` (uninitialized entries).
// This returns a pointer that you must free using TfLiteIntArrayFree().
TfLiteIntArray* TfLiteIntArrayCreate(int size);
#endif
// Check if two intarrays are equal. Returns 1 if they are equal, 0 otherwise.
int TfLiteIntArrayEqual(const TfLiteIntArray* a, const TfLiteIntArray* b);
// Check if an intarray equals an array. Returns 1 if equals, 0 otherwise.
int TfLiteIntArrayEqualsArray(const TfLiteIntArray* a, int b_size,
const int b_data[]);
#ifndef TF_LITE_STATIC_MEMORY
// Create a copy of an array passed as `src`.
// You are expected to free memory with TfLiteIntArrayFree
TfLiteIntArray* TfLiteIntArrayCopy(const TfLiteIntArray* src);
// Free memory of array `a`.
void TfLiteIntArrayFree(TfLiteIntArray* a);
#endif // TF_LITE_STATIC_MEMORY
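//
// Example (illustrative sketch): building and releasing a shape array for a
// 3x2 tensor.
//
//   TfLiteIntArray* dims = TfLiteIntArrayCreate(2);
//   dims->data[0] = 3;
//   dims->data[1] = 2;
//   /* ... read or pass `dims`; free it only if ownership was not
//      transferred (ResizeTensor, for example, takes ownership) ... */
//   TfLiteIntArrayFree(dims);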
// Fixed size list of floats. Used for per-channel quantization.
typedef struct TfLiteFloatArray {
int size;
// gcc 6.1+ has a bug where flexible members aren't properly handled
// https://github.com/google/re2/commit/b94b7cd42e9f02673cd748c1ac1d16db4052514c
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 6 && \
__GNUC_MINOR__ >= 1
float data[0];
#else
float data[];
#endif
} TfLiteFloatArray;
// Given the size (number of elements) in a TfLiteFloatArray, calculate its size
// in bytes.
int TfLiteFloatArrayGetSizeInBytes(int size);
#ifndef TF_LITE_STATIC_MEMORY
// Create an array of a given `size` (uninitialized entries).
// This returns a pointer that you must free using TfLiteFloatArrayFree().
TfLiteFloatArray* TfLiteFloatArrayCreate(int size);
// Free memory of array `a`.
void TfLiteFloatArrayFree(TfLiteFloatArray* a);
#endif // TF_LITE_STATIC_MEMORY
// Since we must not depend on any libraries, define a minimal subset of
// error macros while avoiding names that have pre-conceived meanings like
// assert and check.
// Try to make all reporting calls through TF_LITE_KERNEL_LOG rather than
// calling the context->ReportError function directly, so that message strings
// can be stripped out if the binary size needs to be severely optimized.
#ifndef TF_LITE_STRIP_ERROR_STRINGS
#define TF_LITE_KERNEL_LOG(context, ...) \
do { \
(context)->ReportError((context), __VA_ARGS__); \
} while (false)
#else // TF_LITE_STRIP_ERROR_STRINGS
#define TF_LITE_KERNEL_LOG(context, ...)
#endif // TF_LITE_STRIP_ERROR_STRINGS
// Check whether value is true, and if not return kTfLiteError from
// the current function (and report the error string msg).
#define TF_LITE_ENSURE_MSG(context, value, msg) \
do { \
if (!(value)) { \
TF_LITE_KERNEL_LOG((context), __FILE__ " " msg); \
return kTfLiteError; \
} \
} while (0)
// Check whether the value `a` is true, and if not return kTfLiteError from
// the current function, while also reporting the location of the error.
#define TF_LITE_ENSURE(context, a) \
do { \
if (!(a)) { \
TF_LITE_KERNEL_LOG((context), "%s:%d %s was not true.", __FILE__, \
__LINE__, #a); \
return kTfLiteError; \
} \
} while (0)
#define TF_LITE_ENSURE_STATUS(a) \
do { \
if ((a) != kTfLiteOk) { \
return kTfLiteError; \
} \
} while (0)
// Check whether the value `a == b` is true, and if not return kTfLiteError from
// the current function, while also reporting the location of the error.
// `a` and `b` may be evaluated more than once, so no side effects or
// extremely expensive computations should be done.
#define TF_LITE_ENSURE_EQ(context, a, b) \
do { \
if ((a) != (b)) { \
TF_LITE_KERNEL_LOG((context), "%s:%d %s != %s (%d != %d)", __FILE__, \
__LINE__, #a, #b, (a), (b)); \
return kTfLiteError; \
} \
} while (0)
#define TF_LITE_ENSURE_TYPES_EQ(context, a, b) \
do { \
if ((a) != (b)) { \
TF_LITE_KERNEL_LOG((context), "%s:%d %s != %s (%s != %s)", __FILE__, \
__LINE__, #a, #b, TfLiteTypeGetName(a), \
TfLiteTypeGetName(b)); \
return kTfLiteError; \
} \
} while (0)
#define TF_LITE_ENSURE_OK(context, status) \
do { \
if ((status) != kTfLiteOk) { \
return kTfLiteError; \
} \
} while (0)
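//
// Example (illustrative sketch): typical checks inside an op's `prepare`
// function, where `context`, `node`, `input`, `output`, and `output_size`
// are hypothetical kernel-local variables following the conventions used
// elsewhere in this header.
//
//   TF_LITE_ENSURE_EQ(context, node->inputs->size, 2);
//   TF_LITE_ENSURE_MSG(context, input->type == kTfLiteFloat32,
//                      "only float32 is supported");
//   TF_LITE_ENSURE_OK(context,
//                     context->ResizeTensor(context, output, output_size));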
// Single-precision complex data type compatible with the C99 definition.
typedef struct TfLiteComplex64 {
float re, im; // real and imaginary parts, respectively.
} TfLiteComplex64;
// Half precision data type compatible with the C99 definition.
typedef struct TfLiteFloat16 {
uint16_t data;
} TfLiteFloat16;
// Types supported by tensor
typedef enum {
kTfLiteNoType = 0,
kTfLiteFloat32 = 1,
kTfLiteInt32 = 2,
kTfLiteUInt8 = 3,
kTfLiteInt64 = 4,
kTfLiteString = 5,
kTfLiteBool = 6,
kTfLiteInt16 = 7,
kTfLiteComplex64 = 8,
kTfLiteInt8 = 9,
kTfLiteFloat16 = 10,
} TfLiteType;
// Return the name of a given type, for error reporting purposes.
const char* TfLiteTypeGetName(TfLiteType type);
// SupportedQuantizationTypes.
typedef enum TfLiteQuantizationType {
// No quantization.
kTfLiteNoQuantization = 0,
// Affine quantization (with support for per-channel quantization).
// Corresponds to TfLiteAffineQuantization.
kTfLiteAffineQuantization = 1,
} TfLiteQuantizationType;
// Structure specifying the quantization used by the tensor, if-any.
typedef struct TfLiteQuantization {
// The type of quantization held by params.
TfLiteQuantizationType type;
// Holds a reference to one of the quantization param structures specified
// below.
void* params;
} TfLiteQuantization;
// Legacy. Will be deprecated in favor of TfLiteAffineQuantization.
// If per-layer quantization is specified this field will still be populated in
// addition to TfLiteAffineQuantization.
// Parameters for asymmetric quantization. Quantized values can be converted
// back to float using:
// real_value = scale * (quantized_value - zero_point)
typedef struct TfLiteQuantizationParams {
float scale;
int32_t zero_point;
} TfLiteQuantizationParams;
// Parameters for asymmetric quantization across a dimension (i.e per output
// channel quantization).
// quantized_dimension specifies which dimension the scales and zero_points
// correspond to.
// For a particular value in quantized_dimension, quantized values can be
// converted back to float using:
// real_value = scale * (quantized_value - zero_point)
typedef struct TfLiteAffineQuantization {
TfLiteFloatArray* scale;
TfLiteIntArray* zero_point;
int32_t quantized_dimension;
} TfLiteAffineQuantization;
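//
// Example (illustrative sketch): dequantizing element `i` from channel `c` of
// a per-channel-quantized int8 tensor (`tensor`, `i`, and `c` hypothetical).
//
//   const TfLiteAffineQuantization* aq =
//       (const TfLiteAffineQuantization*)tensor->quantization.params;
//   float real_value = aq->scale->data[c] *
//       (tensor->data.int8[i] - aq->zero_point->data[c]);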
/* A union of pointers that points to memory for a given tensor. */
typedef union TfLitePtrUnion {
/* Do not access these members directly, if possible, use
* GetTensorData<TYPE>(tensor) instead, otherwise only access .data, as other
* members are deprecated. */
int32_t* i32;
int64_t* i64;
float* f;
TfLiteFloat16* f16;
char* raw;
const char* raw_const;
uint8_t* uint8;
bool* b;
int16_t* i16;
TfLiteComplex64* c64;
int8_t* int8;
/* Only use this member. */
void* data;
} TfLitePtrUnion;
// Memory allocation strategies. kTfLiteMmapRo is for read-only memory-mapped
// data (or data externally allocated). kTfLiteArenaRw is arena allocated
// data. kTfLiteDynamic is for tensors that are allocated during evaluation.
typedef enum TfLiteAllocationType {
kTfLiteMemNone = 0,
kTfLiteMmapRo,
kTfLiteArenaRw,
kTfLiteArenaRwPersistent,
kTfLiteDynamic,
} TfLiteAllocationType;
// The delegates should use zero or positive integers to represent handles.
// -1 is reserved for unallocated status.
typedef int TfLiteBufferHandle;
enum {
kTfLiteNullBufferHandle = -1,
};
// Storage format of each dimension in a sparse tensor.
typedef enum TfLiteDimensionType {
kTfLiteDimDense = 0,
kTfLiteDimSparseCSR,
} TfLiteDimensionType;
// Metadata to encode each dimension in a sparse tensor.
typedef struct TfLiteDimensionMetadata {
TfLiteDimensionType format;
int dense_size;
TfLiteIntArray* array_segments;
TfLiteIntArray* array_indices;
} TfLiteDimensionMetadata;
// Parameters used to encode a sparse tensor. For detailed explanation of each
// field please refer to lite/schema/schema.fbs.
typedef struct TfLiteSparsity {
TfLiteIntArray* traversal_order;
TfLiteIntArray* block_map;
TfLiteDimensionMetadata* dim_metadata;
int dim_metadata_size;
} TfLiteSparsity;
// A tensor in the interpreter system which is a wrapper around a buffer of
// data including a dimensionality (or NULL if not currently defined).
typedef struct TfLiteTensor {
// The data type specification for data stored in `data`. This affects
// what member of `data` union should be used.
TfLiteType type;
// A union of data pointers. The appropriate type should be used for a typed
// tensor based on `type`.
TfLitePtrUnion data;
// A pointer to a structure representing the dimensionality interpretation
// that the buffer should have. NOTE: the product of elements of `dims`
// and the element datatype size should be equal to `bytes` below.
TfLiteIntArray* dims;
// Quantization information.
TfLiteQuantizationParams params;
// How memory is mapped
// kTfLiteMmapRo: Memory mapped read only.
// i.e. weights
// kTfLiteArenaRw: Arena allocated read write memory
// (i.e. temporaries, outputs).
TfLiteAllocationType allocation_type;
// The number of bytes required to store the data of this Tensor. I.e.
// (bytes of each element) * dims[0] * ... * dims[n-1]. For example, if
// type is kTfLiteFloat32 and dims = {3, 2} then
// bytes = sizeof(float) * 3 * 2 = 4 * 3 * 2 = 24.
size_t bytes;
// An opaque pointer to a tflite::MMapAllocation
const void* allocation;
// Null-terminated name of this tensor.
const char* name;
// The delegate which knows how to handle `buffer_handle`.
// WARNING: This is an experimental interface that is subject to change.
struct TfLiteDelegate* delegate;
// An integer buffer handle that can be handled by `delegate`.
// The value is valid only when delegate is not null.
// WARNING: This is an experimental interface that is subject to change.
TfLiteBufferHandle buffer_handle;
// If the delegate uses its own buffer (e.g. GPU memory), the delegate is
// responsible to set data_is_stale to true.
// `delegate->CopyFromBufferHandle` can be called to copy the data from
// delegate buffer.
// WARNING: This is an experimental interface that is subject to change.
bool data_is_stale;
// True if the tensor is a variable.
bool is_variable;
// Quantization information. Replaces params field above.
TfLiteQuantization quantization;
// Parameters used to encode a sparse tensor.
// This is optional. The field is NULL if a tensor is dense.
// WARNING: This is an experimental interface that is subject to change.
TfLiteSparsity* sparsity;
// Optional. Encodes shapes with unknown dimensions with -1. This field is
// only populated when unknown dimensions exist in a read-write tensor (i.e.
// an input or output tensor). (e.g. `dims` contains [1, 1, 1, 3] and
// `dims_signature` contains [1, -1, -1, 3]).
const TfLiteIntArray* dims_signature;
} TfLiteTensor;
#ifndef TF_LITE_STATIC_MEMORY
// Free data memory of tensor `t`.
void TfLiteTensorDataFree(TfLiteTensor* t);
// Free quantization data.
void TfLiteQuantizationFree(TfLiteQuantization* quantization);
// Free sparsity parameters.
void TfLiteSparsityFree(TfLiteSparsity* sparsity);
// Free memory of tensor `t`.
void TfLiteTensorFree(TfLiteTensor* t);
// Set all of a tensor's fields (and free any previously allocated data).
void TfLiteTensorReset(TfLiteType type, const char* name, TfLiteIntArray* dims,
TfLiteQuantizationParams quantization, char* buffer,
size_t size, TfLiteAllocationType allocation_type,
const void* allocation, bool is_variable,
TfLiteTensor* tensor);
// Resize the allocated data of a (dynamic) tensor. Tensors with allocation
// types other than kTfLiteDynamic will be ignored.
void TfLiteTensorRealloc(size_t num_bytes, TfLiteTensor* tensor);
#endif // TF_LITE_STATIC_MEMORY
// A structure representing an instance of a node.
// This structure only exhibits the inputs, outputs and user defined data, not
// other features like the type.
typedef struct TfLiteNode {
// Inputs to this node expressed as indices into the simulator's tensors.
TfLiteIntArray* inputs;
// Outputs to this node expressed as indices into the simulator's tensors.
TfLiteIntArray* outputs;
// Intermediate tensors to this node expressed as indices into the simulator's
// tensors.
TfLiteIntArray* intermediates;
// Temporary tensors used during computation. This usually contains no
// tensors, but ops are allowed to change that if they need scratch space of
// any sort.
TfLiteIntArray* temporaries;
// Opaque data provided by the node implementer through `Registration.init`.
void* user_data;
// Opaque data provided to the node if the node is a builtin. This is usually
// a structure defined in builtin_op_data.h
void* builtin_data;
// Custom initial data. This is the opaque data provided in the flatbuffer.
// WARNING: This is an experimental interface that is subject to change.
const void* custom_initial_data;
int custom_initial_data_size;
// The pointer to the delegate. This is non-null only when the node is
// created by calling `interpreter.ModifyGraphWithDelegate`.
// WARNING: This is an experimental interface that is subject to change.
struct TfLiteDelegate* delegate;
} TfLiteNode;
// WARNING: This is an experimental interface that is subject to change.
//
// Currently, TfLiteDelegateParams has to be allocated in a way that it's
// trivially destructible. It will be stored as the `builtin_data` field in
// `TfLiteNode` of the delegate node.
//
// See also the `CreateDelegateParams` function in `interpreter.cc` for details.
typedef struct TfLiteDelegateParams {
struct TfLiteDelegate* delegate;
TfLiteIntArray* nodes_to_replace;
TfLiteIntArray* input_tensors;
TfLiteIntArray* output_tensors;
} TfLiteDelegateParams;
typedef struct TfLiteContext {
// Number of tensors in the context.
size_t tensors_size;
// The execution plan contains a list of the node indices in execution
// order. execution_plan->size is the current number of nodes. And,
// execution_plan->data[0] is the first node that needs to be run.
// TfLiteDelegates can traverse the current execution plan by iterating
// through each member of this array and using GetNodeAndRegistration() to
// access details about a node. i.e.
// TfLiteIntArray* execution_plan;
// TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &execution_plan));
// for (int exec_index = 0; exec_index < execution_plan->size; exec_index++) {
// int node_index = execution_plan->data[exec_index];
// TfLiteNode* node;
// TfLiteRegistration* reg;
// context->GetNodeAndRegistration(context, node_index, &node, &reg);
// }
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*GetExecutionPlan)(struct TfLiteContext* context,
TfLiteIntArray** execution_plan);
// An array of tensors in the interpreter context (of length `tensors_size`)
TfLiteTensor* tensors;
// opaque full context ptr (an opaque c++ data structure)
void* impl_;
// Request memory pointer be resized. Updates dimensions on the tensor.
// NOTE: ResizeTensor takes ownership of newSize.
TfLiteStatus (*ResizeTensor)(struct TfLiteContext*, TfLiteTensor* tensor,
TfLiteIntArray* new_size);
// Request that an error be reported with format string msg.
void (*ReportError)(struct TfLiteContext*, const char* msg, ...);
// Add `tensors_to_add` tensors, preserving pre-existing Tensor entries. If
// non-null, the value pointed to by `first_new_tensor_index` will be set to
// the index of the first new tensor.
TfLiteStatus (*AddTensors)(struct TfLiteContext*, int tensors_to_add,
int* first_new_tensor_index);
// Get a Tensor node by node_index.
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*GetNodeAndRegistration)(
struct TfLiteContext*, int node_index, TfLiteNode** node,
struct TfLiteRegistration** registration);
// Replace ops with one or more stub delegate operations. This function
// does not take ownership of `nodes_to_replace`.
TfLiteStatus (*ReplaceNodeSubsetsWithDelegateKernels)(
struct TfLiteContext*, struct TfLiteRegistration registration,
const TfLiteIntArray* nodes_to_replace, struct TfLiteDelegate* delegate);
// Number of threads that are recommended to subsystems like gemmlowp and
// eigen.
int recommended_num_threads;
// Access external contexts by type.
// WARNING: This is an experimental interface that is subject to change.
TfLiteExternalContext* (*GetExternalContext)(struct TfLiteContext*,
TfLiteExternalContextType);
// Set the value of an external context. Does not take ownership of the
// pointer.
// WARNING: This is an experimental interface that is subject to change.
void (*SetExternalContext)(struct TfLiteContext*, TfLiteExternalContextType,
TfLiteExternalContext*);
// Flag for allowing float16 precision for FP32 calculation.
// default: false.
// WARNING: This is an experimental API and subject to change.
bool allow_fp32_relax_to_fp16;
// Pointer to the op-level profiler, if set; nullptr otherwise.
void* profiler;
// Allocate a persistent buffer which has the same lifetime as the interpreter.
// The memory is allocated from heap for TFL, and from tail in TFLM.
// If *ptr is not nullptr, the pointer will be reallocated.
// This method is only available in Prepare stage.
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*AllocatePersistentBuffer)(struct TfLiteContext* ctx,
size_t bytes, void** ptr);
// Allocate a buffer which will be deallocated right after invoke phase.
// The memory is allocated from heap in TFL, and from volatile arena in TFLM.
// This method is only available in invoke stage.
// NOTE: If possible use RequestScratchBufferInArena method to avoid memory
// allocation during inference time.
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*AllocateBufferForEval)(struct TfLiteContext* ctx, size_t bytes,
void** ptr);
// Request a scratch buffer in the arena through static memory planning.
// This method is only available in Prepare stage and the buffer is allocated
// by the interpreter between Prepare and Eval stage. In Eval stage,
// GetScratchBuffer API can be used to fetch the address.
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*RequestScratchBufferInArena)(struct TfLiteContext* ctx,
size_t bytes, int* buffer_idx);
// Get the scratch buffer pointer.
// This method is only available in Eval stage.
// WARNING: This is an experimental interface that is subject to change.
void* (*GetScratchBuffer)(struct TfLiteContext* ctx, int buffer_idx);
// Resize the memory pointer of the `tensor`. This method behaves the same as
// `ResizeTensor`, except that it makes a copy of the shape array internally
// so the shape array could be deallocated right afterwards.
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*ResizeTensorExplicit)(struct TfLiteContext* ctx,
TfLiteTensor* tensor, int dims,
const int* shape);
// This method provides a preview of post-delegation partitioning. Each
// TfLiteDelegateParams in the referenced array corresponds to one instance of
// the delegate kernel.
// Example usage:
//
// TfLiteIntArray* nodes_to_replace = ...;
// TfLiteDelegateParams* params_array;
// int num_partitions = 0;
// TF_LITE_ENSURE_STATUS(context->PreviewDelegatePartitioning(
// context, delegate, nodes_to_replace, &params_array, &num_partitions));
// for (int idx = 0; idx < num_partitions; idx++) {
// const auto& partition_params = params_array[idx];
// ...
// }
//
// NOTE: The context owns the memory referenced by partition_params_array. It
// will be cleared with another call to PreviewDelegatePartitioning, or after
// TfLiteDelegateParams::Prepare returns.
//
// WARNING: This is an experimental interface that is subject to change.
TfLiteStatus (*PreviewDelegatePartitioning)(
struct TfLiteContext* context, const TfLiteIntArray* nodes_to_replace,
TfLiteDelegateParams** partition_params_array, int* num_partitions);
} TfLiteContext;
typedef struct TfLiteRegistration {
// Initializes the op from serialized data.
// If a built-in op:
// `buffer` is the op's params data (TfLiteLSTMParams*).
// `length` is zero.
// If custom op:
// `buffer` is the op's `custom_options`.
// `length` is the size of the buffer.
//
// Returns a type-punned (i.e. void*) opaque data (e.g. a primitive pointer
// or an instance of a struct).
//
// The returned pointer will be stored with the node in the `user_data` field,
// accessible within prepare and invoke functions below.
// NOTE: if the data is already in the desired format, simply implement this
// function to return `nullptr` and implement the free function to be a no-op.
void* (*init)(TfLiteContext* context, const char* buffer, size_t length);
// The pointer `buffer` is the data previously returned by an init invocation.
void (*free)(TfLiteContext* context, void* buffer);
// prepare is called when the inputs this node depends on have been resized.
// context->ResizeTensor() can be called to request output tensors to be
// resized.
//
// Returns kTfLiteOk on success.
TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node);
// Execute the node (should read node->inputs and output to node->outputs).
// Returns kTfLiteOk on success.
TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node);
// profiling_string is called during summarization of profiling information
// in order to group executions together. Providing a value here will cause a
// given op to appear multiple times in the profiling report. This is
// particularly useful for custom ops that can perform significantly
// different calculations depending on their `user_data`.
const char* (*profiling_string)(const TfLiteContext* context,
const TfLiteNode* node);
// Builtin codes. If this kernel refers to a builtin this is the code
// of the builtin. This is so we can do marshaling to other frameworks like
// NN API.
// Note: It is the responsibility of the registration binder to set this
// properly.
int32_t builtin_code;
// Custom op name. If the op is a builtin, this will be null.
// Note: It is the responsibility of the registration binder to set this
// properly.
// WARNING: This is an experimental interface that is subject to change.
const char* custom_name;
// The version of the op.
// Note: It is the responsibility of the registration binder to set this
// properly.
int version;
} TfLiteRegistration;
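//
// Example (illustrative sketch): a minimal custom-op registration, where
// `MyPrepare` and `MyInvoke` are hypothetical functions with the signatures
// declared above.
//
//   static TfLiteRegistration my_op_registration = {
//       .init = NULL,
//       .free = NULL,
//       .prepare = MyPrepare,
//       .invoke = MyInvoke,
//       .custom_name = "MyOp",
//       .version = 1,
//   };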
// The flags used in `TfLiteDelegate`. Note that this is a bitmask, so the
// values should be 1, 2, 4, 8, ...etc.
typedef enum TfLiteDelegateFlags {
kTfLiteDelegateFlagsNone = 0,
// The flag is set if the delegate can handle dynamic sized tensors.
// For example, the output shape of a `Resize` op with non-constant shape
// can only be inferred when the op is invoked.
// In this case, the Delegate is responsible for calling
// `SetTensorToDynamic` to mark the tensor as a dynamic tensor, and calling
// `ResizeTensor` when invoking the op.
//
// If the delegate isn't capable of handling dynamic tensors, this flag needs
// to be set to false.
kTfLiteDelegateFlagsAllowDynamicTensors = 1
} TfLiteDelegateFlags;
// WARNING: This is an experimental interface that is subject to change.
typedef struct TfLiteDelegate {
// Data that the delegate needs to identify itself. This data is owned by the
// delegate. The delegate is owned by the user code, so the delegate is
// responsible for deallocating this data when it is destroyed.
void* data_;
// Invoked by ModifyGraphWithDelegate. This prepare is called, giving the
// delegate a view of the current graph through TfLiteContext*. It typically
// will look at the nodes and call ReplaceNodeSubsetsWithDelegateKernels()
// to ask the TensorFlow Lite runtime to create macro-nodes to represent
// delegated subgraphs of the original graph.
TfLiteStatus (*Prepare)(TfLiteContext* context,
struct TfLiteDelegate* delegate);
// Copy the data from delegate buffer handle into raw memory of the given
// 'tensor'. This cannot be null. The delegate is allowed to allocate the raw
// bytes as long as it follows the rules for kTfLiteDynamic tensors.
TfLiteStatus (*CopyFromBufferHandle)(TfLiteContext* context,
struct TfLiteDelegate* delegate,
TfLiteBufferHandle buffer_handle,
TfLiteTensor* tensor);
// Copy the data from raw memory of the given 'tensor' to delegate buffer
// handle. This can be null if the delegate doesn't use its own buffer.
TfLiteStatus (*CopyToBufferHandle)(TfLiteContext* context,
struct TfLiteDelegate* delegate,
TfLiteBufferHandle buffer_handle,
TfLiteTensor* tensor);
// Free the Delegate Buffer Handle. Note: This only frees the handle, but
// this doesn't release the underlying resource (e.g. textures). The
// resources are either owned by application layer or the delegate.
// This can be null if the delegate doesn't use its own buffer.
void (*FreeBufferHandle)(TfLiteContext* context,
struct TfLiteDelegate* delegate,
TfLiteBufferHandle* handle);
// Bitmask flags. See the comments in `TfLiteDelegateFlags`.
int64_t flags;
} TfLiteDelegate;
// Build a 'null' delegate, with all the fields properly set to their default
// values.
TfLiteDelegate TfLiteDelegateCreate();
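//
// Example (illustrative sketch): starting from the defaults and overriding
// only the fields a simple delegate needs (`MyDelegatePrepare` and
// `my_backend_state` are hypothetical).
//
//   TfLiteDelegate delegate = TfLiteDelegateCreate();
//   delegate.data_ = my_backend_state;
//   delegate.Prepare = MyDelegatePrepare;
//   delegate.flags = kTfLiteDelegateFlagsNone;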
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // TENSORFLOW_LITE_C_COMMON_H_
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_DELEGATES_GPU_METAL_DELEGATE_H_
#define TENSORFLOW_LITE_DELEGATES_GPU_METAL_DELEGATE_H_
#ifdef SWIG
#define TFL_CAPI_EXPORT
#else
#if defined(_WIN32)
#ifdef TFL_COMPILE_LIBRARY
#define TFL_CAPI_EXPORT __declspec(dllexport)
#else
#define TFL_CAPI_EXPORT __declspec(dllimport)
#endif // TFL_COMPILE_LIBRARY
#else
#define TFL_CAPI_EXPORT __attribute__((visibility("default")))
#endif // _WIN32
#endif // SWIG
#ifdef __cplusplus
extern "C" {
#else
// For "C" 'bool' is not built-in type.
#include <stdbool.h>
#endif // __cplusplus
typedef struct TfLiteDelegate TfLiteDelegate;
typedef enum {
// waitUntilCompleted
TFLGpuDelegateWaitTypePassive,
// Minimize latency. It uses active spinning instead of a mutex and consumes
// additional CPU resources.
TFLGpuDelegateWaitTypeActive,
// Useful when the output is then used in a GPU pipeline, or when an external
// command encoder is set.
TFLGpuDelegateWaitTypeDoNotWait,
// Tries to avoid GPU sleep mode.
TFLGpuDelegateWaitTypeAggressive,
} TFLGpuDelegateWaitType;
// Options used when creating the delegate; see `TFLGpuDelegateCreate` below.
typedef struct {
// Allows quantizing tensors, downcasting values, processing in float16, etc.
bool allow_precision_loss;
TFLGpuDelegateWaitType wait_type;
} TFLGpuDelegateOptions;
// Creates a new delegate instance that needs to be destroyed with
// `TFLDeleteTfLiteGpuDelegate` when delegate is no longer used by TFLite.
// When `options` is set to `nullptr`, the following default values are used:
//   .allow_precision_loss = false,
//   .wait_type = TFLGpuDelegateWaitTypePassive,
TFL_CAPI_EXPORT extern TfLiteDelegate* TFLGpuDelegateCreate(
const TFLGpuDelegateOptions* options);
// Destroys a delegate created with `TFLGpuDelegateCreate` call.
TFL_CAPI_EXPORT extern void TFLGpuDelegateDelete(TfLiteDelegate* delegate);
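//
// Example (illustrative sketch): attaching the Metal delegate through the
// interpreter options declared in c_api.h, then releasing it after use.
//
//   TFLGpuDelegateOptions gpu_options = {
//       .allow_precision_loss = false,
//       .wait_type = TFLGpuDelegateWaitTypePassive,
//   };
//   TfLiteDelegate* gpu_delegate = TFLGpuDelegateCreate(&gpu_options);
//   TfLiteInterpreterOptionsAddDelegate(options, gpu_delegate);
//   /* ... create the interpreter and run inference ... */
//   TFLGpuDelegateDelete(gpu_delegate);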
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // TENSORFLOW_LITE_DELEGATES_GPU_METAL_DELEGATE_H_
framework module TensorFlowLiteC {
umbrella header "TensorFlowLiteC.h"
export *
module * { export * }
link framework "Metal"
}
Copyright 2019 The TensorFlow Authors. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
<div align="center">
<img src="https://www.tensorflow.org/images/tf_logo_social.png">
</div>
**`Documentation`** |
------------------- |
[![Documentation](https://img.shields.io/badge/api-reference-blue.svg)](https://www.tensorflow.org/api_docs/) |
[TensorFlow](https://www.tensorflow.org/) is an end-to-end open source platform
for machine learning. It has a comprehensive, flexible ecosystem of
[tools](https://www.tensorflow.org/resources/tools),
[libraries](https://www.tensorflow.org/resources/libraries-extensions), and
[community](https://www.tensorflow.org/community) resources that lets
researchers push the state-of-the-art in ML and developers easily build and
deploy ML-powered applications.
TensorFlow was originally developed by researchers and engineers working on the
Google Brain team within Google's Machine Intelligence Research organization to
conduct machine learning and deep neural networks research. The system is
general enough to be applicable in a wide variety of other domains, as well.
TensorFlow provides stable [Python](https://www.tensorflow.org/api_docs/python)
and [C++](https://www.tensorflow.org/api_docs/cc) APIs, as well as
a non-guaranteed backward-compatible API for
[other languages](https://www.tensorflow.org/api_docs).
Keep up-to-date with release announcements and security updates by subscribing
to
[announce@tensorflow.org](https://groups.google.com/a/tensorflow.org/forum/#!forum/announce).
See all the [mailing lists](https://www.tensorflow.org/community/forums).
## Install
See the [TensorFlow install guide](https://www.tensorflow.org/install) for the
[pip package](https://www.tensorflow.org/install/pip), to
[enable GPU support](https://www.tensorflow.org/install/gpu), use a
[Docker container](https://www.tensorflow.org/install/docker), and
[build from source](https://www.tensorflow.org/install/source).
To install the current release, which includes support for
[CUDA-enabled GPU cards](https://www.tensorflow.org/install/gpu) *(Ubuntu and
Windows)*:
```
$ pip install tensorflow
```
A smaller CPU-only package is also available:
```
$ pip install tensorflow-cpu
```
To update TensorFlow to the latest version, add the `--upgrade` flag to the
above commands.
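For example, to upgrade the standard package:

```
$ pip install --upgrade tensorflow
```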
*Nightly binaries are available for testing using the
[tf-nightly](https://pypi.python.org/pypi/tf-nightly) and
[tf-nightly-cpu](https://pypi.python.org/pypi/tf-nightly-cpu) packages on PyPI.*
#### *Try your first TensorFlow program*
```shell
$ python
```
```python
>>> import tensorflow as tf
>>> tf.add(1, 2).numpy()
3
>>> hello = tf.constant('Hello, TensorFlow!')
>>> hello.numpy()
b'Hello, TensorFlow!'
```
For more examples, see the
[TensorFlow tutorials](https://www.tensorflow.org/tutorials/).
## Contribution guidelines
**If you want to contribute to TensorFlow, be sure to review the
[contribution guidelines](CONTRIBUTING.md). This project adheres to TensorFlow's
[code of conduct](CODE_OF_CONDUCT.md). By participating, you are expected to
uphold this code.**
**We use [GitHub issues](https://github.com/tensorflow/tensorflow/issues) for
tracking requests and bugs, please see
[TensorFlow Discuss](https://groups.google.com/a/tensorflow.org/forum/#!forum/discuss)
for general questions and discussion, and please direct specific questions to
[Stack Overflow](https://stackoverflow.com/questions/tagged/tensorflow).**
The TensorFlow project strives to abide by generally accepted best practices in
open-source software development:
[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/1486/badge)](https://bestpractices.coreinfrastructure.org/projects/1486)
[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-v1.4%20adopted-ff69b4.svg)](CODE_OF_CONDUCT.md)
## Continuous build status
### Official Builds
Build Type | Status | Artifacts
------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------
**Linux CPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-cc.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-cc.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Linux GPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-gpu-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-gpu-py3.html) | [PyPI](https://pypi.org/project/tf-nightly-gpu/)
**Linux XLA** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-xla.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/ubuntu-xla.html) | TBA
**macOS** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/macos-py2-cc.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/macos-py2-cc.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Windows CPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-cpu.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-cpu.html) | [PyPI](https://pypi.org/project/tf-nightly/)
**Windows GPU** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-gpu.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/windows-gpu.html) | [PyPI](https://pypi.org/project/tf-nightly-gpu/)
**Android** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/android.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/android.html) | [![Download](https://api.bintray.com/packages/google/tensorflow/tensorflow/images/download.svg)](https://bintray.com/google/tensorflow/tensorflow/_latestVersion)
**Raspberry Pi 0 and 1** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py2.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py2.html) [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi01-py3.html) | [Py2](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp27-none-linux_armv6l.whl) [Py3](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp34-none-linux_armv6l.whl)
**Raspberry Pi 2 and 3** | [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py2.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py2.html) [![Status](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py3.svg)](https://storage.googleapis.com/tensorflow-kokoro-build-badges/rpi23-py3.html) | [Py2](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp27-none-linux_armv7l.whl) [Py3](https://storage.googleapis.com/tensorflow-nightly/tensorflow-1.10.0-cp34-none-linux_armv7l.whl)
### Community Supported Builds
Build Type | Status | Artifacts
----------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------
**Linux AMD ROCm GPU** Nightly | [![Build Status](http://ml-ci.amd.com:21096/job/tensorflow-rocm-nightly/badge/icon)](http://ml-ci.amd.com:21096/job/tensorflow-rocm-nightly) | [Nightly](http://ml-ci.amd.com:21096/job/tensorflow-rocm-nightly/lastSuccessfulBuild/)
**Linux AMD ROCm GPU** Stable Release | [![Build Status](http://ml-ci.amd.com:21096/job/tensorflow-rocm-release/badge/icon)](http://ml-ci.amd.com:21096/job/tensorflow-rocm-release/) | Release [1.15](http://ml-ci.amd.com:21096/job/tensorflow-rocm-release/lastSuccessfulBuild/) / [2.x](http://ml-ci.amd.com:21096/job/tensorflow-rocm-v2-release/lastSuccessfulBuild/)
**Linux s390x** Nightly | [![Build Status](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/badge/icon)](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/) | [Nightly](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/)
**Linux s390x CPU** Stable Release | [![Build Status](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/badge/icon)](https://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/) | [Release](https://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/)
**Linux ppc64le CPU** Nightly | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Build/) | [Nightly](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Nightly_Artifact/)
**Linux ppc64le CPU** Stable Release | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) | Release [1.15](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) / [2.x](https://powerci.osuosl.org/job/TensorFlow2_PPC64LE_CPU_Release_Build/)
**Linux ppc64le GPU** Nightly | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Build/) | [Nightly](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Nightly_Artifact/)
**Linux ppc64le GPU** Stable Release | [![Build Status](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/badge/icon)](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) | Release [1.15](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) / [2.x](https://powerci.osuosl.org/job/TensorFlow2_PPC64LE_GPU_Release_Build/)
**Linux CPU with Intel® MKL-DNN** Nightly | [![Build Status](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/badge/icon)](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/) | [Nightly](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/)
**Linux CPU with Intel® MKL-DNN** Stable Release | ![Build Status](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-release-whl/badge/icon) | Release [1.15](https://pypi.org/project/intel-tensorflow/1.15.0/) / [2.x](https://pypi.org/project/intel-tensorflow/)
**Red Hat® Enterprise Linux® 7.6 CPU & GPU** <br> Python 2.7, 3.6 | [![Build Status](https://jenkins-tensorflow.apps.ci.centos.org/buildStatus/icon?job=tensorflow-rhel7-3.6&build=2)](https://jenkins-tensorflow.apps.ci.centos.org/job/tensorflow-rhel7-3.6/2/) | [1.13.1 PyPI](https://tensorflow.pypi.thoth-station.ninja/index/)
## Resources
* [TensorFlow.org](https://www.tensorflow.org)
* [TensorFlow Tutorials](https://www.tensorflow.org/tutorials/)
* [TensorFlow Official Models](https://github.com/tensorflow/models/tree/master/official)
* [TensorFlow Examples](https://github.com/tensorflow/examples)
* [TensorFlow in Practice from Coursera](https://www.coursera.org/specializations/tensorflow-in-practice)
* [TensorFlow: Data and Deployment from Coursera](https://www.coursera.org/specializations/tensorflow-data-and-deployment)
* [Intro to TensorFlow for Deep Learning from Udacity](https://www.udacity.com/course/intro-to-tensorflow-for-deep-learning--ud187)
* [Introduction to TensorFlow Lite from Udacity](https://www.udacity.com/course/intro-to-tensorflow-lite--ud190)
* [TensorFlow Blog](https://blog.tensorflow.org)
* [Learn ML with TensorFlow](https://www.tensorflow.org/resources/learn-ml)
* [TensorFlow Twitter](https://twitter.com/tensorflow)
* [TensorFlow YouTube](https://www.youtube.com/channel/UC0rqucBdTuFTjJiefW5t-IQ)
* [TensorFlow Roadmap](https://www.tensorflow.org/community/roadmap)
* [TensorFlow White Papers](https://www.tensorflow.org/about/bib)
* [TensorBoard Visualization Toolkit](https://github.com/tensorflow/tensorboard)
Learn more about the
[TensorFlow community](https://www.tensorflow.org/community) and how to
[contribute](https://www.tensorflow.org/community/contribute).
## License
[Apache License 2.0](LICENSE)
// Copyright 2019 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlowLiteC
/// A delegate that the `Interpreter` uses to perform TensorFlow Lite model computations.
public protocol Delegate: AnyObject {
/// The `TfLiteDelegate` C pointer type.
typealias CDelegate = UnsafeMutablePointer<TfLiteDelegate>
/// The delegate that performs model computations.
var cDelegate: CDelegate { get }
}
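// Hypothetical conformance sketch (not part of this commit): any class that
// vends a `TfLiteDelegate` pointer satisfies `Delegate`; `MetalDelegate`
// below conforms in exactly this shape.
final class WrappedDelegate: Delegate {
  let cDelegate: CDelegate

  /// Wraps an externally created `TfLiteDelegate` pointer; ownership remains
  /// with the caller.
  init(cDelegate: CDelegate) {
    self.cDelegate = cDelegate
  }
}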
// Copyright 2018 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import Foundation
import TensorFlowLiteC
/// A TensorFlow Lite interpreter that performs inference from a given model.
public final class Interpreter {
/// The configuration options for the `Interpreter`.
public let options: Options?
/// An `Array` of `Delegate`s for the `Interpreter` to use to perform graph operations.
public let delegates: [Delegate]?
/// The total number of input `Tensor`s associated with the model.
public var inputTensorCount: Int {
return Int(TfLiteInterpreterGetInputTensorCount(cInterpreter))
}
/// The total number of output `Tensor`s associated with the model.
public var outputTensorCount: Int {
return Int(TfLiteInterpreterGetOutputTensorCount(cInterpreter))
}
/// The `TfLiteInterpreter` C pointer type represented as an `UnsafePointer<TfLiteInterpreter>`.
private typealias CInterpreter = OpaquePointer
/// The underlying `TfLiteInterpreter` C pointer.
private var cInterpreter: CInterpreter?
/// Creates a new instance with the given values.
///
/// - Parameters:
/// - modelPath: The local file path to a TensorFlow Lite model.
/// - options: Configurations for the `Interpreter`. The default is `nil` indicating that the
/// `Interpreter` will determine the configuration options.
/// - delegates: `Array` of `Delegate`s for the `Interpreter` to use to perform graph operations.
/// The default is `nil`.
/// - Throws: An error if the model could not be loaded or the interpreter could not be created.
public init(modelPath: String, options: Options? = nil, delegates: [Delegate]? = nil) throws {
guard let model = Model(filePath: modelPath) else { throw InterpreterError.failedToLoadModel }
guard let cInterpreterOptions = TfLiteInterpreterOptionsCreate() else {
throw InterpreterError.failedToCreateInterpreter
}
defer { TfLiteInterpreterOptionsDelete(cInterpreterOptions) }
self.options = options
self.delegates = delegates
options.map {
if let threadCount = $0.threadCount, threadCount > 0 {
TfLiteInterpreterOptionsSetNumThreads(cInterpreterOptions, Int32(threadCount))
}
TfLiteInterpreterOptionsSetErrorReporter(
cInterpreterOptions,
{ (_, format, args) -> Void in
// Workaround for optionality differences for x86_64 (non-optional) and arm64 (optional).
let optionalArgs: CVaListPointer? = args
guard let cFormat = format,
let arguments = optionalArgs,
let message = String(cFormat: cFormat, arguments: arguments)
else {
return
}
print(String(describing: InterpreterError.tensorFlowLiteError(message)))
},
nil
)
}
delegates?.forEach { TfLiteInterpreterOptionsAddDelegate(cInterpreterOptions, $0.cDelegate) }
guard let cInterpreter = TfLiteInterpreterCreate(model.cModel, cInterpreterOptions) else {
throw InterpreterError.failedToCreateInterpreter
}
self.cInterpreter = cInterpreter
}
deinit {
TfLiteInterpreterDelete(cInterpreter)
}
/// Invokes the interpreter to perform inference from the loaded graph.
///
/// - Throws: An error if the model was not ready because the tensors were not allocated.
public func invoke() throws {
guard TfLiteInterpreterInvoke(cInterpreter) == kTfLiteOk else {
throw InterpreterError.allocateTensorsRequired
}
}
/// Returns the input `Tensor` at the given index.
///
/// - Parameters:
/// - index: The index for the input `Tensor`.
/// - Throws: An error if the index is invalid or the tensors have not been allocated.
/// - Returns: The input `Tensor` at the given index.
public func input(at index: Int) throws -> Tensor {
let maxIndex = inputTensorCount - 1
guard case 0...maxIndex = index else {
throw InterpreterError.invalidTensorIndex(index: index, maxIndex: maxIndex)
}
guard let cTensor = TfLiteInterpreterGetInputTensor(cInterpreter, Int32(index)),
let bytes = TfLiteTensorData(cTensor),
let nameCString = TfLiteTensorName(cTensor)
else {
throw InterpreterError.allocateTensorsRequired
}
guard let dataType = Tensor.DataType(type: TfLiteTensorType(cTensor)) else {
throw InterpreterError.invalidTensorDataType
}
let name = String(cString: nameCString)
let rank = TfLiteTensorNumDims(cTensor)
let dimensions = (0..<rank).map { Int(TfLiteTensorDim(cTensor, $0)) }
let shape = Tensor.Shape(dimensions)
let byteCount = TfLiteTensorByteSize(cTensor)
let data = Data(bytes: bytes, count: byteCount)
let cQuantizationParams = TfLiteTensorQuantizationParams(cTensor)
let scale = cQuantizationParams.scale
let zeroPoint = Int(cQuantizationParams.zero_point)
var quantizationParameters: QuantizationParameters? = nil
if scale != 0.0 {
quantizationParameters = QuantizationParameters(scale: scale, zeroPoint: zeroPoint)
}
let tensor = Tensor(
name: name,
dataType: dataType,
shape: shape,
data: data,
quantizationParameters: quantizationParameters
)
return tensor
}
/// Returns the output `Tensor` at the given index.
///
/// - Parameters:
/// - index: The index for the output `Tensor`.
/// - Throws: An error if the index is invalid, tensors haven't been allocated, or interpreter
/// has not been invoked for models that dynamically compute output tensors based on the
/// values of its input tensors.
/// - Returns: The output `Tensor` at the given index.
public func output(at index: Int) throws -> Tensor {
let maxIndex = outputTensorCount - 1
guard case 0...maxIndex = index else {
throw InterpreterError.invalidTensorIndex(index: index, maxIndex: maxIndex)
}
guard let cTensor = TfLiteInterpreterGetOutputTensor(cInterpreter, Int32(index)),
let bytes = TfLiteTensorData(cTensor),
let nameCString = TfLiteTensorName(cTensor)
else {
throw InterpreterError.invokeInterpreterRequired
}
guard let dataType = Tensor.DataType(type: TfLiteTensorType(cTensor)) else {
throw InterpreterError.invalidTensorDataType
}
let name = String(cString: nameCString)
let rank = TfLiteTensorNumDims(cTensor)
let dimensions = (0..<rank).map { Int(TfLiteTensorDim(cTensor, $0)) }
let shape = Tensor.Shape(dimensions)
let byteCount = TfLiteTensorByteSize(cTensor)
let data = Data(bytes: bytes, count: byteCount)
let cQuantizationParams = TfLiteTensorQuantizationParams(cTensor)
let scale = cQuantizationParams.scale
let zeroPoint = Int(cQuantizationParams.zero_point)
var quantizationParameters: QuantizationParameters? = nil
if scale != 0.0 {
quantizationParameters = QuantizationParameters(scale: scale, zeroPoint: zeroPoint)
}
let tensor = Tensor(
name: name,
dataType: dataType,
shape: shape,
data: data,
quantizationParameters: quantizationParameters
)
return tensor
}
/// Resizes the input `Tensor` at the given index to the specified `Tensor.Shape`.
///
/// - Note: After resizing an input tensor, the client **must** explicitly call
/// `allocateTensors()` before attempting to access the resized tensor data or invoking the
/// interpreter to perform inference.
/// - Parameters:
/// - index: The index for the input `Tensor`.
/// - shape: The shape to resize the input `Tensor` to.
/// - Throws: An error if the input tensor at the given index could not be resized.
public func resizeInput(at index: Int, to shape: Tensor.Shape) throws {
let maxIndex = inputTensorCount - 1
guard case 0...maxIndex = index else {
throw InterpreterError.invalidTensorIndex(index: index, maxIndex: maxIndex)
}
guard TfLiteInterpreterResizeInputTensor(
cInterpreter,
Int32(index),
shape.int32Dimensions,
Int32(shape.rank)
) == kTfLiteOk
else {
throw InterpreterError.failedToResizeInputTensor(index: index)
}
}
/// Copies the given data to the input `Tensor` at the given index.
///
/// - Parameters:
/// - data: The data to be copied to the input `Tensor`'s data buffer.
/// - index: The index for the input `Tensor`.
/// - Throws: An error if the `data.count` does not match the input tensor's `data.count` or if
/// the given index is invalid.
/// - Returns: The input `Tensor` with the copied data.
@discardableResult
public func copy(_ data: Data, toInputAt index: Int) throws -> Tensor {
let maxIndex = inputTensorCount - 1
guard case 0...maxIndex = index else {
throw InterpreterError.invalidTensorIndex(index: index, maxIndex: maxIndex)
}
guard let cTensor = TfLiteInterpreterGetInputTensor(cInterpreter, Int32(index)) else {
throw InterpreterError.allocateTensorsRequired
}
let byteCount = TfLiteTensorByteSize(cTensor)
guard data.count == byteCount else {
throw InterpreterError.invalidTensorDataCount(provided: data.count, required: byteCount)
}
#if swift(>=5.0)
let status = data.withUnsafeBytes {
TfLiteTensorCopyFromBuffer(cTensor, $0.baseAddress, data.count)
}
#else
let status = data.withUnsafeBytes { TfLiteTensorCopyFromBuffer(cTensor, $0, data.count) }
#endif // swift(>=5.0)
guard status == kTfLiteOk else { throw InterpreterError.failedToCopyDataToInputTensor }
return try input(at: index)
}
/// Allocates memory for all input `Tensor`s based on their `Tensor.Shape`s.
///
/// - Note: This is a relatively expensive operation and should only be called after creating the
/// interpreter and resizing any input tensors.
/// - Throws: An error if memory could not be allocated for the input tensors.
public func allocateTensors() throws {
guard TfLiteInterpreterAllocateTensors(cInterpreter) == kTfLiteOk else {
throw InterpreterError.failedToAllocateTensors
}
}
}
extension Interpreter {
/// Options for configuring the `Interpreter`.
public struct Options: Equatable, Hashable {
/// The maximum number of CPU threads that the interpreter should run on. The default is `nil`
/// indicating that the `Interpreter` will decide the number of threads to use.
public var threadCount: Int? = nil
/// Creates a new instance with the default values.
public init() {}
}
}
/// A type alias for `Interpreter.Options` to support backward compatibility with the deprecated
/// `InterpreterOptions` struct.
@available(*, deprecated, renamed: "Interpreter.Options")
public typealias InterpreterOptions = Interpreter.Options
extension String {
/// Returns a new `String` initialized by using the given format C array as a template into which
/// the remaining argument values are substituted according to the user’s default locale.
///
/// - Note: Returns `nil` if a new `String` could not be constructed from the given values.
/// - Parameters:
/// - cFormat: The format C array as a template for substituting values.
/// - arguments: A C pointer to a `va_list` of arguments to substitute into `cFormat`.
init?(cFormat: UnsafePointer<CChar>, arguments: CVaListPointer) {
var buffer: UnsafeMutablePointer<CChar>?
guard vasprintf(&buffer, cFormat, arguments) >= 0, let cString = buffer else { return nil }
// `vasprintf` allocates; free the buffer once `String` has copied the bytes.
defer { free(cString) }
self.init(validatingUTF8: cString)
}
}
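// End-to-end usage sketch for the `Interpreter` above. The model path, input
// values, and single Float32 input/output tensors are illustrative
// assumptions, not part of this SDK.
do {
  let interpreter = try Interpreter(modelPath: "path/to/model.tflite")
  try interpreter.allocateTensors()
  // Copy Float32 input data into the first input tensor.
  let input: [Float32] = [1, 2, 3, 4]
  let inputData = input.withUnsafeBufferPointer { Data(buffer: $0) }
  try interpreter.copy(inputData, toInputAt: 0)
  try interpreter.invoke()
  // Read the first output tensor back as [Float32].
  let output = try interpreter.output(at: 0)
  let results = output.data.withUnsafeBytes { Array($0.bindMemory(to: Float32.self)) }
  print(results)
} catch {
  print(String(describing: error))
}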
// Copyright 2018 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import Foundation
/// Errors thrown by the TensorFlow Lite `Interpreter`.
public enum InterpreterError: Error, Equatable, Hashable {
case invalidTensorIndex(index: Int, maxIndex: Int)
case invalidTensorDataCount(provided: Int, required: Int)
case invalidTensorDataType
case failedToLoadModel
case failedToCreateInterpreter
case failedToResizeInputTensor(index: Int)
case failedToCopyDataToInputTensor
case failedToAllocateTensors
case allocateTensorsRequired
case invokeInterpreterRequired
case tensorFlowLiteError(String)
}
extension InterpreterError: LocalizedError {
/// A localized description of the interpreter error.
public var errorDescription: String? {
switch self {
case .invalidTensorIndex(let index, let maxIndex):
return "Invalid tensor index \(index), max index is \(maxIndex)."
case .invalidTensorDataCount(let provided, let required):
return "Provided data count \(provided) must match the required count \(required)."
case .invalidTensorDataType:
return "Tensor data type is unsupported or could not be determined due to a model error."
case .failedToLoadModel:
return "Failed to load the given model."
case .failedToCreateInterpreter:
return "Failed to create the interpreter."
case .failedToResizeInputTensor(let index):
return "Failed to resize input tesnor at index \(index)."
case .failedToCopyDataToInputTensor:
return "Failed to copy data to input tensor."
case .failedToAllocateTensors:
return "Failed to allocate memory for input tensors."
case .allocateTensorsRequired:
return "Must call allocateTensors()."
case .invokeInterpreterRequired:
return "Must call invoke()."
case .tensorFlowLiteError(let message):
return "TensorFlow Lite Error: \(message)"
}
}
}
extension InterpreterError: CustomStringConvertible {
/// A textual representation of the TensorFlow Lite interpreter error.
public var description: String { return errorDescription ?? "Unknown error." }
}
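// Usage sketch: errors thrown by the `Interpreter` can be matched against the
// cases above. The `interpreter` value is assumed from the earlier sketch.
do {
  _ = try interpreter.input(at: 99)
} catch let error as InterpreterError {
  switch error {
  case .invalidTensorIndex(let index, let maxIndex):
    print("Index \(index) is out of range (max \(maxIndex)).")
  default:
    print(error.description)
  }
} catch {
  print(error)
}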
// Copyright 2019 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlowLiteC
/// A delegate that uses the `Metal` framework for performing TensorFlow Lite graph operations with
/// GPU acceleration.
///
/// - Important: This is an experimental interface that is subject to change.
public final class MetalDelegate: Delegate {
/// The configuration options for the `MetalDelegate`.
public let options: Options
// Conformance to the `Delegate` protocol.
public private(set) var cDelegate: CDelegate
/// Creates a new instance configured with the given `options`.
///
/// - Parameters:
/// - options: Configurations for the delegate. The default is a new instance of
/// `MetalDelegate.Options` with the default configuration values.
public init(options: Options = Options()) {
self.options = options
var delegateOptions = TFLGpuDelegateOptions()
delegateOptions.allow_precision_loss = options.allowsPrecisionLoss
delegateOptions.wait_type = options.waitType.cWaitType
cDelegate = TFLGpuDelegateCreate(&delegateOptions)
}
deinit {
TFLGpuDelegateDelete(cDelegate)
}
}
extension MetalDelegate {
/// Options for configuring the `MetalDelegate`.
public struct Options: Equatable, Hashable {
/// Indicates whether the GPU delegate allows precision loss, such as allowing `Float16`
/// precision for a `Float32` computation. The default is `false`.
public var allowsPrecisionLoss = false
/// A type indicating how the current thread should wait for work on the GPU to complete. The
/// default is `passive`.
public var waitType: ThreadWaitType = .passive
/// Creates a new instance with the default values.
public init() {}
}
}
/// A type indicating how the current thread should wait for work scheduled on the GPU to complete.
public enum ThreadWaitType: Equatable, Hashable {
/// The thread does not wait for the work to complete. Useful when the output of the work is used
/// with the GPU pipeline.
case none
/// The thread waits until the work is complete.
case passive
/// The thread waits for the work to complete with minimal latency, which may require additional
/// CPU resources.
case active
/// The thread waits for the work while trying to prevent the GPU from going into sleep mode.
case aggressive
/// The C `TFLGpuDelegateWaitType` for the current `ThreadWaitType`.
var cWaitType: TFLGpuDelegateWaitType {
switch self {
case .none:
return TFLGpuDelegateWaitTypeDoNotWait
case .passive:
return TFLGpuDelegateWaitTypePassive
case .active:
return TFLGpuDelegateWaitTypeActive
case .aggressive:
return TFLGpuDelegateWaitTypeAggressive
}
}
}
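// Usage sketch: configure the `MetalDelegate` above and attach it to an
// `Interpreter`. The model path is an illustrative assumption.
var delegateOptions = MetalDelegate.Options()
delegateOptions.allowsPrecisionLoss = true
delegateOptions.waitType = .passive
let metalDelegate = MetalDelegate(options: delegateOptions)
let interpreter = try Interpreter(
  modelPath: "path/to/model.tflite",
  delegates: [metalDelegate]
)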
// Copyright 2018 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlowLiteC
/// A TensorFlow Lite model used by the `Interpreter` to perform inference.
final class Model {
/// The `TfLiteModel` C pointer type represented as an `UnsafePointer<TfLiteModel>`.
typealias CModel = OpaquePointer
/// The underlying `TfLiteModel` C pointer.
let cModel: CModel?
/// Creates a new instance with the given `filePath`.
///
/// - Precondition: Initialization can fail if the given `filePath` is invalid.
/// - Parameters:
/// - filePath: The local file path to a TensorFlow Lite model.
init?(filePath: String) {
guard !filePath.isEmpty, let cModel = TfLiteModelCreateFromFile(filePath) else { return nil }
self.cModel = cModel
}
deinit {
TfLiteModelDelete(cModel)
}
}
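// Usage sketch (module-internal, illustrative path): `Model` fails its
// initializer on an invalid path, mirroring the guard in `Interpreter.init`
// above.
if let model = Model(filePath: "path/to/model.tflite") {
  // `model.cModel` is what `TfLiteInterpreterCreate` consumes.
  _ = model.cModel
}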
// Copyright 2018 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Parameters that determine the mapping of quantized values to real values. Quantized values can
/// be mapped to float values using the following conversion:
/// `realValue = scale * (quantizedValue - zeroPoint)`.
public struct QuantizationParameters: Equatable, Hashable {
/// The difference between real values corresponding to consecutive quantized values differing by
/// 1. For example, the range of quantized values for `UInt8` data type is [0, 255].
public let scale: Float
/// The quantized value that corresponds to the real 0 value.
public let zeroPoint: Int
/// Creates a new instance with the given values.
///
/// - Parameters:
/// - scale: The scale value for asymmetric quantization.
/// - zeroPoint: The zero point for asymmetric quantization.
init(scale: Float, zeroPoint: Int) {
self.scale = scale
self.zeroPoint = zeroPoint
}
}
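// Worked example of the mapping documented above,
// `realValue = scale * (quantizedValue - zeroPoint)`, using illustrative
// UInt8 quantization parameters.
let params = QuantizationParameters(scale: 0.5, zeroPoint: 128)
let quantized: UInt8 = 200
let real = params.scale * Float(Int(quantized) - params.zeroPoint)
// real == 0.5 * (200 - 128) == 36.0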
// Copyright 2018 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import Foundation
import TensorFlowLiteC
/// An input or output tensor in a TensorFlow Lite graph.
public struct Tensor: Equatable, Hashable {
/// The name of the `Tensor`.
public let name: String
/// The data type of the `Tensor`.
public let dataType: DataType
/// The shape of the `Tensor`.
public let shape: Shape
/// The data in the input or output `Tensor`.
public let data: Data
/// The quantization parameters for the `Tensor` if using a quantized model.
public let quantizationParameters: QuantizationParameters?
/// Creates a new input or output `Tensor` instance.
///
/// - Parameters:
/// - name: The name of the `Tensor`.
/// - dataType: The data type of the `Tensor`.
/// - shape: The shape of the `Tensor`.
/// - data: The data in the input `Tensor`.
/// - quantizationParameters: Parameters for the `Tensor` if using a quantized model. The default
/// is `nil`.
init(
name: String,
dataType: DataType,
shape: Shape,
data: Data,
quantizationParameters: QuantizationParameters? = nil
) {
self.name = name
self.dataType = dataType
self.shape = shape
self.data = data
self.quantizationParameters = quantizationParameters
}
}
extension Tensor {
/// The supported `Tensor` data types.
public enum DataType: Equatable, Hashable {
/// A boolean.
case bool
/// An 8-bit unsigned integer.
case uInt8
/// A 16-bit signed integer.
case int16
/// A 32-bit signed integer.
case int32
/// A 64-bit signed integer.
case int64
/// A 16-bit half precision floating point.
case float16
/// A 32-bit single precision floating point.
case float32
/// Creates a new instance from the given `TfLiteType` or `nil` if the data type is unsupported
/// or could not be determined because there was an error.
///
/// - Parameter type: A data type for a tensor.
init?(type: TfLiteType) {
switch type {
case kTfLiteBool:
self = .bool
case kTfLiteUInt8:
self = .uInt8
case kTfLiteInt16:
self = .int16
case kTfLiteInt32:
self = .int32
case kTfLiteInt64:
self = .int64
case kTfLiteFloat16:
self = .float16
case kTfLiteFloat32:
self = .float32
case kTfLiteNoType:
fallthrough
default:
return nil
}
}
}
}
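// Illustrative checks of the mapping above: `kTfLiteFloat32` maps to
// `.float32`, while `kTfLiteNoType` (or any unknown type) yields `nil`.
assert(Tensor.DataType(type: kTfLiteFloat32) == .float32)
assert(Tensor.DataType(type: kTfLiteNoType) == nil)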
extension Tensor {
/// The shape of a `Tensor`.
public struct Shape: Equatable, Hashable {
/// The number of dimensions of the `Tensor`.
public let rank: Int
/// An array of dimensions for the `Tensor`.
public let dimensions: [Int]
/// An array of `Int32` dimensions for the `Tensor`.
var int32Dimensions: [Int32] { return dimensions.map(Int32.init) }
/// Creates a new instance with the given array of dimensions.
///
/// - Parameters:
/// - dimensions: Dimensions for the `Tensor`.
public init(_ dimensions: [Int]) {
self.rank = dimensions.count
self.dimensions = dimensions
}
/// Creates a new instance with the given elements representing the dimensions.
///
/// - Parameters:
/// - elements: Dimensions for the `Tensor`.
public init(_ elements: Int...) {
self.init(elements)
}
}
}
extension Tensor.Shape: ExpressibleByArrayLiteral {
/// Creates a new instance with the given array literal representing the dimensions.
///
/// - Parameters:
/// - arrayLiteral: Dimensions for the `Tensor`.
public init(arrayLiteral: Int...) {
self.init(arrayLiteral)
}
}
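// Illustrative shape construction using the three initializers above; each
// value describes the same `[1, 224, 224, 3]` image tensor shape.
let fromArray = Tensor.Shape([1, 224, 224, 3])
let fromVariadic = Tensor.Shape(1, 224, 224, 3)
let fromLiteral: Tensor.Shape = [1, 224, 224, 3]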
// Copyright 2019 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlowLiteC
/// TensorFlow Lite runtime values.
public enum Runtime {
/// A string describing the semantic versioning information for the runtime. Returns an empty
/// string if the version could not be determined.
public static var version: String { return TfLiteVersion().map(String.init) ?? "" }
}
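// Usage sketch: query the version of the linked TensorFlow Lite runtime.
print("TensorFlow Lite runtime version: \(Runtime.version)")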