Commit 244eb7f9 authored by apkadmin

update build full bitcode

parent 7088d89b
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
uuid = "B08B0591-1A45-4415-9C04-B83659FBFE35"
type = "0"
version = "2.0">
</Bucket>
@@ -36,6 +36,7 @@ typealias FileInfo = (name: String, extension: String)
enum MobileNet {
static let modelInfo: FileInfo = (name: "liveness", extension: "tflite")
static let cardModel: FileInfo = (name: "valid_card_10102020", extension: "tflite")
static let landMarkModel: FileInfo = (name: "face_detection_front", extension: "tflite")
}
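// Usage sketch (illustrative, not part of this commit): resolving a FileInfo
// to a bundle URL; assumes the .tflite files ship inside the SDK bundle.
private func exampleModelURL() -> URL? {
    let info = MobileNet.landMarkModel
    return Bundle(for: SBKModelDataHandler.self)
        .url(forResource: info.name, withExtension: info.extension)
}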
/// This class handles all data preprocessing and makes calls to run inference on a given frame
@@ -213,16 +214,7 @@ class SBKModelDataHandler {
return results
}
/// Returns the RGB data representation of the given image buffer with the specified `byteCount`.
///
/// - Parameters:
/// - buffer: The pixel buffer to convert to RGB data.
/// - byteCount: The expected byte count for the RGB data calculated using the values that the
/// model was trained on: `batchSize * imageWidth * imageHeight * componentsCount`.
/// - isModelQuantized: Whether the model is quantized (i.e. fixed point values rather than
/// floating point values).
/// - Returns: The RGB data representation of the image buffer or `nil` if the buffer could not be
/// converted.
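/// For example (hypothetical dimensions), a quantized 224x224 RGB model with
/// batch size 1 expects `byteCount` = 1 * 224 * 224 * 3 = 150,528 bytes; a
/// float32 model needs 4 bytes per component, i.e. 602,112.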
private func rgbDataFromBuffer(
_ buffer: CVPixelBuffer,
byteCount: Int,
@@ -293,26 +285,12 @@ class SBKModelDataHandler {
// MARK: - Extensions
extension Data {
/// Creates a new buffer by copying the buffer pointer of the given array.
///
/// - Warning: The given array's element type `T` must be trivial in that it can be copied bit
/// for bit with no indirection or reference-counting operations; otherwise, reinterpreting
/// data from the resulting buffer has undefined behavior.
/// - Parameter array: An array with elements of type `T`.
init<T>(copyingBufferOf array: [T]) {
self = array.withUnsafeBufferPointer(Data.init)
}
}
extension Array {
/// Creates a new array from the bytes of the given unsafe data.
///
/// - Warning: The array's `Element` type must be trivial in that it can be copied bit for bit
/// with no indirection or reference-counting operations; otherwise, copying the raw bytes in
/// the `unsafeData`'s buffer to a new array returns an unsafe copy.
/// - Note: Returns `nil` if `unsafeData.count` is not a multiple of
/// `MemoryLayout<Element>.stride`.
/// - Parameter unsafeData: The data containing the bytes to turn into an array.
init?(unsafeData: Data) {
guard unsafeData.count % MemoryLayout<Element>.stride == 0 else { return nil }
#if swift(>=5.0)
@@ -327,20 +305,3 @@ extension Array {
#endif // swift(>=5.0)
}
}
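// Usage sketch (illustrative, not part of this commit): round-tripping an
// array of trivial elements through the two helpers above.
private func dataArrayRoundTripExample() {
    let weights: [Float] = [0.1, 0.2, 0.3]
    let blob = Data(copyingBufferOf: weights)  // 12 bytes: 3 x 4-byte Float
    let restored = [Float](unsafeData: blob)   // Optional([0.1, 0.2, 0.3])
    print(blob.count, restored ?? [])
}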
extension UIImage {
func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
// Reads from the `dataImage` argument; assumes 4 bytes per pixel (RGBA)
// and that `dataImage` matches the image's pixel dimensions.
let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
let r = CGFloat(dataImage[pixelInfo]) / CGFloat(255.0)
let g = CGFloat(dataImage[pixelInfo+1]) / CGFloat(255.0)
let b = CGFloat(dataImage[pixelInfo+2]) / CGFloat(255.0)
let a = CGFloat(dataImage[pixelInfo+3]) / CGFloat(255.0)
return UIColor(red: r, green: g, blue: b, alpha: a)
}
}
//
// AnchorOption.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class AnchorOption {
init(inputSizeWidth: Int, inputSizeHeight: Int, minScale: Double, maxScale: Double, anchorOffsetX: Double, anchorOffsetY: Double, numLayers: Int, featureMapWidth: [Int], featureMapHeight: [Int], strides: [Int], aspectRatios: [Double], reduceBoxesInLowestLayer: Bool, interpolatedScaleAspectRatio: Double, fixedAnchorSize: Bool) {
self.inputSizeWidth = inputSizeWidth
self.inputSizeHeight = inputSizeHeight
self.minScale = minScale
self.maxScale = maxScale
self.anchorOffsetX = anchorOffsetX
self.anchorOffsetY = anchorOffsetY
self.numLayers = numLayers
self.featureMapWidth = featureMapWidth
self.featureMapHeight = featureMapHeight
self.strides = strides
self.aspectRatios = aspectRatios
self.reduceBoxesInLowestLayer = reduceBoxesInLowestLayer
self.interpolatedScaleAspectRatio = interpolatedScaleAspectRatio
self.fixedAnchorSize = fixedAnchorSize
}
var inputSizeWidth: Int
var inputSizeHeight: Int
var minScale: Double
var maxScale: Double
var anchorOffsetX: Double
var anchorOffsetY: Double
var numLayers: Int
var featureMapWidth: [Int]
var featureMapHeight: [Int]
var strides: [Int]
var aspectRatios: [Double]
var reduceBoxesInLowestLayer: Bool
var interpolatedScaleAspectRatio: Double
var fixedAnchorSize: Bool
func stridesSize() -> Int {
return strides.count
}
func featureMapHeightSize() -> Int {
return featureMapHeight.count
}
func featureMapWidthSize() -> Int {
return featureMapWidth.count
}
}
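// Illustrative only: an AnchorOption populated with the SSD anchor settings
// MediaPipe uses for its face_detection_front model. Values are assumptions
// drawn from that config, not defined by this commit; feature map sizes are
// left empty because they can be derived from the strides.
private func exampleFrontFaceAnchorOptions() -> AnchorOption {
    return AnchorOption(
        inputSizeWidth: 128, inputSizeHeight: 128,
        minScale: 0.1484375, maxScale: 0.75,
        anchorOffsetX: 0.5, anchorOffsetY: 0.5,
        numLayers: 4,
        featureMapWidth: [], featureMapHeight: [],
        strides: [8, 16, 16, 16],
        aspectRatios: [1.0],
        reduceBoxesInLowestLayer: false,
        interpolatedScaleAspectRatio: 1.0,
        fixedAnchorSize: true)
}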
//
// Detection.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
struct Detection {
init(score: Double, xMin: Double, yMin: Double, width: Double, height: Double, classID: Int, landMark: [Landmark]) {
self.score = score
self.xMin = xMin
self.yMin = yMin
self.width = width
self.height = height
self.classID = classID
self.landMark = landMark
}
var score: Double
var xMin: Double
var yMin: Double
var width: Double
var height: Double
var classID: Int
var landMark: [Landmark]
}
//
// EMSimilarity.swift
// SwiftSim
//
// Created by Evan Moss on 8/1/16.
// Copyright © 2016 Enterprising Technologies LLC. All rights reserved.
//
// The MIT License (MIT)
//
// Copyright (c) 2016 Evan Moss
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import Foundation
enum EMSimilarityMode {
case Cosine
case Tanimoto
case Ochiai
case JaccardIndex
case JaccardDistance
case Dice
case Hamming
}
enum EMVectorSizeMismatchMode {
case Bail
case Truncate
}
class EMSimilarity {
/** Similarity metric mode **/
private var currentSimMode = [EMSimilarityMode.Cosine]
/** Set the currentSimMode via push **/
func pushSimMode(mode: EMSimilarityMode) {
self.currentSimMode.append(mode)
}
/** Pop the currentSimMode via pop if it won't make the stack empty **/
func popSimMode() {
if self.currentSimMode.count > 1 {
let _ = self.currentSimMode.popLast()
}
}
/** Get the currently set similarity mode **/
func getCurrentSimMode() -> EMSimilarityMode? {
return self.currentSimMode.last
}
/** Mismatch Mode **/
private var currentMismatchMode = [EMVectorSizeMismatchMode.Bail]
/** Set the currentMismatchMode via push **/
func pushMismatchMode(mode: EMVectorSizeMismatchMode) {
self.currentMismatchMode.append(mode)
}
/** Pop the currentMismatchMode via pop if it won't make the stack empty **/
func popMismatchMode() {
if self.currentMismatchMode.count > 1 {
let _ = self.currentMismatchMode.popLast()
}
}
/** Get the currently set mismatch mode **/
func getCurrentMismatchMode() -> EMVectorSizeMismatchMode? {
return self.currentMismatchMode.last
}
/** Dot Product **/
private func dot(A: [Double], B: [Double]) -> Double {
var x: Double = 0
for i in 0..<A.count {
x += A[i] * B[i]
}
return x
}
/** Vector Magnitude **/
private func magnitude(A: [Double]) -> Double {
var x: Double = 0
for elt in A {
x += elt * elt
}
return sqrt(x)
}
/** Cosine similarity **/
private func cosineSim(A: [Double], B: [Double]) -> Double {
return dot(A: A, B: B) / (magnitude(A: A) * magnitude(A: B))
}
/** Tanimoto similarity **/
private func tanimotoSim(A: [Double], B: [Double]) -> Double {
let Amag = magnitude(A: A)
let Bmag = magnitude(A: B)
let AdotB = dot(A: A, B: B)
return AdotB / (Amag * Amag + Bmag * Bmag - AdotB)
}
/** Ochiai similarity **/
private func ochiaiSim(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return Double(a.intersection(b).count) / sqrt(Double(a.count) * Double(b.count))
}
/** Jaccard index **/
private func jaccardIndex(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return Double(a.intersection(b).count) / Double(a.union(b).count)
}
/** Jaccard distance **/
private func jaccardDist(A: [Double], B: [Double]) -> Double {
return 1.0 - jaccardIndex(A: A, B: B)
}
/** Dice coefficient **/
private func diceCoef(A: [Double], B: [Double]) -> Double {
let a = Set(A)
let b = Set(B)
return 2.0 * Double(a.intersection(b).count) / (Double(a.count) + Double(b.count))
}
/** Hamming distance **/
private func hammingDist(A: [Double], B: [Double]) -> Double {
var x: Double = 0
if A.isEmpty {
return x
}
for i in 0..<A.count {
if A[i] != B[i] {
x += 1
}
}
return x
}
private let enforceEqualVectorSizes: Set<EMSimilarityMode> = [.Cosine, .Tanimoto, .Hamming]
private let bailOnEmptyInput: Set<EMSimilarityMode> = [.Cosine, .Tanimoto, .Ochiai]
private let allowEmptyInputs: Set<EMSimilarityMode> = [.Hamming]
/**
* Main compute mode
* Double types
* Returns the similarity results or -1.0 on caught error
*/
func compute(A: [Double], B: [Double]) -> Double {
// get the mode
var mode = EMSimilarityMode.Cosine
if let _mode = self.getCurrentSimMode() {
mode = _mode
}
else {
return -1
}
// are both vectors empty?
if A.isEmpty && B.isEmpty && !allowEmptyInputs.contains(mode) {
// divide by zero -> D.N.E.
return -1
}
// is one of the vectors empty, and would this cause a divide-by-zero error?
if bailOnEmptyInput.contains(mode) && (A.isEmpty || B.isEmpty) {
return -1
}
// look for vector size mismatch for modes in enforceEqualVectorSizes
if enforceEqualVectorSizes.contains(mode) && A.count != B.count {
if let mismatchMode = self.getCurrentMismatchMode() {
switch mismatchMode {
case .Bail:
return -1
case .Truncate:
let a = A.count < B.count ? A : B
let _b = A.count < B.count ? B : A
var b = [Double]()
for i in 0..<a.count {
b.append(_b[i])
}
return compute(A: a, B: b)
}
}
else {
return -1
}
}
switch mode {
case .Cosine:
return cosineSim(A: A, B: B)
case .Tanimoto:
return tanimotoSim(A: A, B: B)
case .Ochiai:
return ochiaiSim(A: A, B: B)
case .JaccardIndex:
return jaccardIndex(A: A, B: B)
case .JaccardDistance:
return jaccardDist(A: A, B: B)
case .Dice:
return diceCoef(A: A, B: B)
case .Hamming:
return hammingDist(A: A, B: B)
}
}
}
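// Usage sketch (illustrative): the mode stack defaults to Cosine; push/pop
// temporarily switches metrics, and -1.0 signals a caught error.
private func similarityExample() {
    let sim = EMSimilarity()
    let a: [Double] = [1, 0, 1]
    let b: [Double] = [1, 1, 1]
    let cosine = sim.compute(A: a, B: b)   // 2/sqrt(6) ~ 0.8165
    sim.pushSimMode(mode: .Hamming)
    let hamming = sim.compute(A: a, B: b)  // 1.0: vectors differ in one position
    sim.popSimMode()
    print(cosine, hamming)
}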
//
// LandMark.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class Landmark{
init(x: Double, y: Double) {
self.x = x
self.y = y
}
var x: Double
var y: Double
}
//
// NormalizeOp.swift
// movanai
//
// Created by Nguyen Van An on 4/4/21.
//
import Foundation
class NormalizeOp {
var x: Float = 0
var y: Float = 0
init(_ x: Float, _ y: Float ) {
self.x = x
self.y = y
}
}
//
// OptionsFace.swift
// OCR-SDK
//
// Created by annguyen on 12/03/2021.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
class OptionsFace {
init(numClasses: Int, numBoxes: Int, numCoords: Int, keypointCoordOffset: Int, ignoreClasses: [Int], scoreClippingThresh: Double, minScoreThresh: Double, numKeypoints: Int, numValuesPerKeypoint: Int, boxCoordOffset: Int, xScale: Double, yScale: Double, wScale: Double, hScale: Double, applyExponentialOnBoxSize: Bool, reverseOutputOrder: Bool, sigmoidScore: Bool, flipVertically: Bool) {
self.numClasses = numClasses
self.numBoxes = numBoxes
self.numCoords = numCoords
self.keypointCoordOffset = keypointCoordOffset
self.ignoreClasses = ignoreClasses
self.scoreClippingThresh = scoreClippingThresh
self.minScoreThresh = minScoreThresh
self.numKeypoints = numKeypoints
self.numValuesPerKeypoint = numValuesPerKeypoint
self.boxCoordOffset = boxCoordOffset
self.xScale = xScale
self.yScale = yScale
self.wScale = wScale
self.hScale = hScale
self.applyExponentialOnBoxSize = applyExponentialOnBoxSize
self.reverseOutputOrder = reverseOutputOrder
self.sigmoidScore = sigmoidScore
self.flipVertically = flipVertically
}
var numClasses: Int
var numBoxes: Int
var numCoords: Int
var keypointCoordOffset : Int
var ignoreClasses: [Int]
var scoreClippingThresh: Double
var minScoreThresh: Double
var numKeypoints: Int
var numValuesPerKeypoint: Int
var boxCoordOffset: Int
var xScale: Double
var yScale: Double
var wScale : Double
var hScale: Double
var applyExponentialOnBoxSize: Bool
var reverseOutputOrder: Bool
var sigmoidScore: Bool
var flipVertically: Bool
}
class Anchor {
init(xCenter: Double, yCenter: Double, h: Double, w: Double) {
self.xCenter = xCenter
self.yCenter = yCenter
self.h = h
self.w = w
}
var xCenter: Double
var yCenter: Double
var h: Double
var w: Double
}
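// Illustrative only: decode options matching MediaPipe's face_detection_front
// TensorsToDetections config. All values are assumptions drawn from that
// config, not defined by this commit.
private func exampleFrontFaceOptions() -> OptionsFace {
    return OptionsFace(
        numClasses: 1, numBoxes: 896, numCoords: 16,
        keypointCoordOffset: 4, ignoreClasses: [],
        scoreClippingThresh: 100.0, minScoreThresh: 0.75,
        numKeypoints: 6, numValuesPerKeypoint: 2,
        boxCoordOffset: 0,
        xScale: 128.0, yScale: 128.0, wScale: 128.0, hScale: 128.0,
        applyExponentialOnBoxSize: false,
        reverseOutputOrder: true, sigmoidScore: true, flipVertically: false)
}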
@@ -11,6 +11,7 @@ class SBKResultCapture: UIViewController {
@IBOutlet weak var imgPhotoCard: UIImageView!
public var imageData: Data?
public var dataCrop: Data?
var cropZone: CGRect?
@IBOutlet weak var btnNext: UIButton!
@IBOutlet weak var btnClose: UIButton!
@IBOutlet weak var lbDescription: UILabel!
@@ -61,7 +62,9 @@ class SBKResultCapture: UIViewController {
if UIDevice.current.userInterfaceIdiom == .pad {
cropImage = SBKValidateInput.shared.cropImageHorizontal(image: imageCap!, rect: CGRect(x: imageCap!.size.width * 1 / 10, y: imageCap!.size.height * 3 / 20, width: imageCap!.size.width * 8 / 10, height: imageCap!.size.height * 8 / 10), scale: 1.0)!.rotate(radians: .pi / 2)
} else {
cropImage = self.cropImage(image: imageCap!, rect: CGRect(x: imageCap!.size.width / 20, y: imageCap!.size.height / 8 + imageCap!.size.height / 50, width: imageCap!.size.width * 18 / 20, height: imageCap!.size.width * 18 / 20 * 3 / 4 ), scale: scale)
if let cropZone = self.cropZone {
cropImage = imageCap!.crop(rect: cropZone, scale: 1.0)
}
}
dataCrop = cropImage!.pngData()
@@ -72,14 +75,7 @@ class SBKResultCapture: UIViewController {
fatalError("init(coder:) has not been implemented")
}
// Process the captured image for display
func cropImage(image: UIImage, rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
image.draw(at: CGPoint(x: -rect.origin.x , y: -rect.origin.y ))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func navigateToFace() {
DispatchQueue.main.async {
@@ -153,27 +149,3 @@ class SBKResultCapture: UIViewController {
}
}
extension UIImage {
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
// Trim off the extremely small float value to prevent core graphics from rounding it up
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
// Move origin to middle
context.translateBy(x: newSize.width/2, y: newSize.height/2)
// Rotate around middle
context.rotate(by: CGFloat(radians))
// Draw the image at its center
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
}
//
// OverLayView.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import AVFoundation
import UIKit
class OverLayCardView: UIView {
@IBInspectable var previewWidth: CGFloat = 100
@IBInspectable var previewHeight: CGFloat = 100
@IBInspectable var borderLength : CGFloat = 10
@IBInspectable var borderPadding : CGFloat = 0
@IBInspectable var borderWidth : CGFloat = 2
@IBInspectable var borderColor : CGColor = UIColor.white.cgColor
@IBInspectable var marginTop : CGFloat = 0
@IBInspectable var marginLeft : CGFloat = 0
@IBInspectable var connerRadius : CGFloat = 8
let vContainer: UIView = {
let view = UIView()
view.backgroundColor = UIColor.black.withAlphaComponent(0.6)
return view
}()
let vContainer2: UIView = {
let view = UIView()
return view
}()
override init(frame: CGRect) {
super.init(frame: frame)
vContainer.frame = self.bounds
self.addSubview(vContainer)
vContainer2.frame = self.bounds
self.addSubview(vContainer2)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setLayer(){
var x = marginTop
var y = marginLeft
if marginTop == 0 {
x = self.center.x - previewWidth/2
}
if marginLeft == 0 {
y = self.center.y - previewHeight/2
}
// add the mask to the container
let maskLayer = CALayer()
maskLayer.frame = self.bounds
let circleLayer = CAShapeLayer()
circleLayer.frame = CGRect(x:0 , y:0,width: self.frame.size.width,height: self.frame.size.height)
let finalPath = UIBezierPath(roundedRect: CGRect(x:0 , y:0,width: self.frame.size.width,height: self.frame.size.height), cornerRadius: 0)
let rectPath = UIBezierPath(roundedRect: CGRect(x: x, y: y,width: previewWidth, height: previewHeight), cornerRadius: connerRadius)
finalPath.append(rectPath.reversing())
circleLayer.path = finalPath.cgPath
maskLayer.addSublayer(circleLayer)
vContainer.layer.mask = maskLayer
let clearLayer = CALayer()
clearLayer.frame = vContainer2.bounds
//draw border radius
let path = UIBezierPath(roundedRect: CGRect(x: x + borderPadding, y: y + borderPadding, width: previewWidth - borderPadding*2, height: previewHeight - borderPadding*2), cornerRadius: connerRadius)
let shape = CAShapeLayer()
shape.path = path.cgPath
shape.strokeColor = borderColor
shape.lineWidth = borderWidth
shape.fillColor = UIColor.clear.cgColor
vContainer2.layer.addSublayer(shape)
}
func setBorderColor(color: CGColor){
if borderColor != color {
vContainer2.layer.sublayers = nil
borderColor = color
var x = marginTop
var y = marginLeft
if marginTop == 0 {
x = self.center.x - previewWidth/2
}
if marginLeft == 0 {
y = self.center.y - previewHeight/2
}
//draw border radius
let path = UIBezierPath(roundedRect: CGRect(x: x + borderPadding, y: y + borderPadding, width: previewWidth - borderPadding*2, height: previewHeight - borderPadding*2), cornerRadius: connerRadius)
let shape = CAShapeLayer()
shape.path = path.cgPath
shape.strokeColor = borderColor
shape.lineWidth = borderWidth
shape.fillColor = UIColor.clear.cgColor
vContainer2.layer.addSublayer(shape)
}
}
}
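// Usage sketch (illustrative): installing the overlay above a camera preview
// and recoloring the border, e.g. once a card is detected.
private func exampleOverlaySetup(in parent: UIView) {
    let overlay = OverLayCardView(frame: parent.bounds)
    overlay.previewWidth = 300
    overlay.previewHeight = 190
    parent.addSubview(overlay)
    overlay.setLayer()  // punches the transparent card window into the dim mask
    overlay.setBorderColor(color: UIColor.green.cgColor)
}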
//
// ExtCGImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
import CoreGraphics
import CoreImage
import VideoToolbox
extension CGImage{
func toCVPixelBuffer() -> CVPixelBuffer? {
let frameSize = CGSize(width: self.width, height: self.height)
var pixelBuffer:CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(frameSize.width), Int(frameSize.height), kCVPixelFormatType_32BGRA , nil, &pixelBuffer)
if status != kCVReturnSuccess {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags.init(rawValue: 0))
let data = CVPixelBufferGetBaseAddress(pixelBuffer!)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
let context = CGContext(data: data, width: Int(frameSize.width), height: Int(frameSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: bitmapInfo.rawValue)
context?.draw(self, in: CGRect(x: 0, y: 0, width: self.width, height: self.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
public static func create(pixelBuffer: CVPixelBuffer) -> CGImage? {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
return cgImage
}
}
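// Illustrative round trip (not part of this commit): CVPixelBuffer -> CGImage
// -> cropped CVPixelBuffer, e.g. to isolate a detected face region.
private func exampleCropRegion(of buffer: CVPixelBuffer, region: CGRect) -> CVPixelBuffer? {
    guard let cgImage = CGImage.create(pixelBuffer: buffer) else { return nil }
    return cgImage.cropping(to: region)?.toCVPixelBuffer()
}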
//
// ExtCIImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
extension CIImage{
func toUIImage() -> UIImage {
// EAGLContext/OpenGL ES is deprecated; fall back to a default CIContext if it is unavailable.
let eaglContext = EAGLContext(api: .openGLES2)
let context: CIContext = eaglContext.map { CIContext(eaglContext: $0) } ?? CIContext()
let cgImage:CGImage = context.createCGImage(self, from: self.extent)!
let image:UIImage = UIImage.init(cgImage: cgImage)
return image
}
}
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
import UIKit
import Accelerate
extension CVPixelBuffer {
/**
Returns thumbnail by cropping pixel buffer to biggest square and scaling the cropped image to
model dimensions.
*/
func centerThumbnail(ofSize size: CGSize ) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
let thumbnailSize = min(imageWidth, imageHeight)
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
@@ -139,5 +115,76 @@ extension CVPixelBuffer {
return pixelBuffer
}
func resized(to size: CGSize ) -> CVPixelBuffer? {
let imageWidth = CVPixelBufferGetWidth(self)
let imageHeight = CVPixelBufferGetHeight(self)
let pixelBufferType = CVPixelBufferGetPixelFormatType(self)
assert(pixelBufferType == kCVPixelFormatType_32BGRA ||
pixelBufferType == kCVPixelFormatType_32ARGB)
let inputImageRowBytes = CVPixelBufferGetBytesPerRow(self)
let imageChannels = 4
CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
// Finds the biggest square in the pixel buffer and advances rows based on it.
guard let inputBaseAddress = CVPixelBufferGetBaseAddress(self) else {
return nil
}
// Gets vImage Buffer from input image
var inputVImageBuffer = vImage_Buffer(data: inputBaseAddress, height: UInt(imageHeight), width: UInt(imageWidth), rowBytes: inputImageRowBytes)
let scaledImageRowBytes = Int(size.width) * imageChannels
guard let scaledImageBytes = malloc(Int(size.height) * scaledImageRowBytes) else {
return nil
}
// Allocates a vImage buffer for scaled image.
var scaledVImageBuffer = vImage_Buffer(data: scaledImageBytes, height: UInt(size.height), width: UInt(size.width), rowBytes: scaledImageRowBytes)
// Performs the scale operation on input image buffer and stores it in scaled image buffer.
let scaleError = vImageScale_ARGB8888(&inputVImageBuffer, &scaledVImageBuffer, nil, vImage_Flags(0))
CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
guard scaleError == kvImageNoError else {
return nil
}
let releaseCallBack: CVPixelBufferReleaseBytesCallback = {mutablePointer, pointer in
if let pointer = pointer {
free(UnsafeMutableRawPointer(mutating: pointer))
}
}
var scaledPixelBuffer: CVPixelBuffer?
// Converts the scaled vImage buffer to CVPixelBuffer
let conversionStatus = CVPixelBufferCreateWithBytes(nil, Int(size.width), Int(size.height), pixelBufferType, scaledImageBytes, scaledImageRowBytes, releaseCallBack, nil, nil, &scaledPixelBuffer)
guard conversionStatus == kCVReturnSuccess else {
free(scaledImageBytes)
return nil
}
return scaledPixelBuffer
}
func toUIImage() -> UIImage {
let ciimage : CIImage = CIImage(cvPixelBuffer: self)
let imageView : UIImage = ciimage.toUIImage()
return imageView
}
func crop(rect: CGRect, scale: CGFloat) -> CVPixelBuffer? {
// Note: `scale` is unused; the crop is performed in pixel coordinates.
guard let imageCgi = CGImage.create(pixelBuffer: self),
let cropped = imageCgi.cropping(to: rect) else { return nil }
return cropped.toCVPixelBuffer()
}
}
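// Usage sketch (illustrative): scaling a captured BGRA frame to an assumed
// 128x128 model input before inference.
private func exampleModelInput(from frame: CVPixelBuffer) -> CVPixelBuffer? {
    return frame.resized(to: CGSize(width: 128, height: 128))
}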
//
// ExtUIImage.swift
// OCR-SDK
//
// Created by Nguyen Van An on 4/26/21.
// Copyright © 2021 itsol. All rights reserved.
//
import Foundation
import UIKit
extension UIImage {
// Process the image for display
func crop(rect: CGRect, scale: CGFloat) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(CGSize(width: rect.width, height: rect.height), true, 0.0)
self.draw(at: CGPoint(x: -rect.minX / scale, y: -rect.minY / scale))
let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return croppedImage
}
func getPixelColor(pos: CGPoint, dataImage: Data, image: UIImage) -> UIColor {
// Reads from the `dataImage` argument; assumes 4 bytes per pixel (RGBA)
// and that `dataImage` matches the image's pixel dimensions.
let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y)) + Int(pos.x)) * 4
let r = CGFloat(dataImage[pixelInfo]) / CGFloat(255.0)
let g = CGFloat(dataImage[pixelInfo+1]) / CGFloat(255.0)
let b = CGFloat(dataImage[pixelInfo+2]) / CGFloat(255.0)
let a = CGFloat(dataImage[pixelInfo+3]) / CGFloat(255.0)
return UIColor(red: r, green: g, blue: b, alpha: a)
}
func rotate(radians: Float) -> UIImage? {
var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size
newSize.width = floor(newSize.width)
newSize.height = floor(newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale)
let context = UIGraphicsGetCurrentContext()!
context.translateBy(x: newSize.width/2, y: newSize.height/2)
context.rotate(by: CGFloat(radians))
self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
func resize(targetSize: CGSize) -> UIImage {
let size = self.size
let widthRatio = targetSize.width / self.size.width
let heightRatio = targetSize.height / self.size.height
var newSize: CGSize
if widthRatio > heightRatio {
newSize = CGSize(width: size.width * heightRatio, height: size.height * heightRatio)
} else {
newSize = CGSize(width: size.width * widthRatio, height: size.height * widthRatio)
}
let rect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height)
UIGraphicsBeginImageContextWithOptions(newSize, false, 1.0)
self.draw(in: rect)
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage!
}
func toCVPixel() -> CVPixelBuffer {
let ciimage = CIImage(image: self)
let eaglContext = EAGLContext(api: .openGLES2)
let tmpcontext = CIContext(eaglContext: eaglContext!)
let cgimage = tmpcontext.createCGImage(ciimage!, from: ciimage!.extent)
return cgimage!.toCVPixelBuffer()!
}
}
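// Usage sketch (illustrative): chaining the helpers above to turn a photo
// into an assumed 128x128 model input.
private func examplePixelBuffer(from photo: UIImage) -> CVPixelBuffer {
    return photo.resize(targetSize: CGSize(width: 128, height: 128)).toCVPixel()
}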
@@ -37,4 +37,24 @@ extension UIViewController {
func validateImage(image: Data) -> Bool {
return true
}
func popupBackToCaptureCardFront() {
let alert = UIAlertController(title: "Confirm".localized(), message: "You will go back to front card photography.".localized(), preferredStyle: .alert)
let actionClose = UIAlertAction(title: "No".localized(), style: .cancel, handler: nil)
alert.addAction(actionClose)
let actionOk = UIAlertAction(title: "Yes".localized(), style: .default, handler: {
action in
for controller in self.navigationController!.viewControllers as Array {
if controller.isKind(of: SBKCaptureCardVC.self) {
self.navigationController!.popToViewController(controller, animated: true)
break
}
}
})
alert.addAction(actionOk)
present(alert, animated: true, completion: nil)
}
}
@@ -6,13 +6,45 @@ target 'OCR-SDK' do
use_frameworks!
# Pods for OCR-SDK
-pod 'TensorFlowLiteSwift'
+pod 'TensorFlowLiteSwift', '~>2.2.0'
#pod 'GoogleMobileVision/FaceDetector'
#pod 'GTMSessionFetcher'
end
#post_install do |installer|
# installer.pods_project.build_configurations.each do |config|
# config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
# config.build_settings["BITCODE_GENERATION_MODE"] = "bitcode"
# end
#end
#bitcode enable
post_install do |installer|
  installer.pods_project.build_configurations.each do |config|
    config.build_settings["EXCLUDED_ARCHS[sdk=iphonesimulator*]"] = "arm64"
  end
  installer.pods_project.targets.each do |target|
    target.build_configurations.each do |config|
      # set valid architectures
      config.build_settings['VALID_ARCHS'] = 'arm64 armv7 armv7s i386 x86_64'
      # build all architectures, not only the active one
      config.build_settings['ONLY_ACTIVE_ARCH'] = 'NO'
      config.build_settings['ENABLE_BITCODE'] = 'YES'
      if config.name == 'Release' || config.name == 'Pro'
        config.build_settings['BITCODE_GENERATION_MODE'] = 'bitcode'
      else # Debug
        config.build_settings['BITCODE_GENERATION_MODE'] = 'marker'
      end
      cflags = config.build_settings['OTHER_CFLAGS'] || ['$(inherited)']
      if config.name == 'Release' || config.name == 'Pro'
        cflags << '-fembed-bitcode'
      else # Debug
        cflags << '-fembed-bitcode-marker'
      end
      config.build_settings['OTHER_CFLAGS'] = cflags
    end
  end
end
@@ -4,7 +4,7 @@ PODS:
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
-  - TensorFlowLiteSwift
+  - TensorFlowLiteSwift (~> 2.2.0)
SPEC REPOS:
trunk:
@@ -15,6 +15,6 @@ SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
-PODFILE CHECKSUM: a8990648dc4761bcfc73655f0e8e51e3109f0e4f
+PODFILE CHECKSUM: 61617ddc17c979c1fe27a952bf716e6bd14ac52c
COCOAPODS: 1.10.1
@@ -4,7 +4,7 @@ PODS:
- TensorFlowLiteC (= 2.2.0)
DEPENDENCIES:
-  - TensorFlowLiteSwift
+  - TensorFlowLiteSwift (~> 2.2.0)
SPEC REPOS:
trunk:
@@ -15,6 +15,6 @@ SPEC CHECKSUMS:
TensorFlowLiteC: b3ab9e867b0b71052ca102a32a786555b330b02e
TensorFlowLiteSwift: 2dd5e9c895e1819501f0fba3d8b69a536bda6c65
-PODFILE CHECKSUM: a8990648dc4761bcfc73655f0e8e51e3109f0e4f
+PODFILE CHECKSUM: 61617ddc17c979c1fe27a952bf716e6bd14ac52c
COCOAPODS: 1.10.1
@@ -402,6 +402,7 @@
baseConfigurationReference = AED476478C959569CFCC3DF9E47408C5 /* Pods-OCR-SDK.debug.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
BITCODE_GENERATION_MODE = marker;
CLANG_ENABLE_OBJC_WEAK = NO;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
@@ -411,6 +412,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
@@ -421,6 +423,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
@@ -429,6 +436,7 @@
SDKROOT = iphoneos;
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
@@ -440,14 +448,22 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
BITCODE_GENERATION_MODE = bitcode;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
};
name = Release;
};
@@ -489,7 +505,6 @@
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = arm64;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = (
@@ -552,7 +567,6 @@
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = arm64;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
@@ -587,13 +601,21 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
BITCODE_GENERATION_MODE = marker;
ENABLE_BITCODE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
};
name = Debug;
};
@@ -601,6 +623,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 4DEF9604B3A10391246BB01C3B360192 /* TensorFlowLiteSwift.release.xcconfig */;
buildSettings = {
BITCODE_GENERATION_MODE = bitcode;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
@@ -609,6 +632,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
@@ -620,6 +644,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
@@ -628,6 +657,7 @@
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
@@ -637,6 +667,7 @@
isa = XCBuildConfiguration;
baseConfigurationReference = CF82B8C58A0FD821537E3660EAB99FAB /* TensorFlowLiteSwift.debug.xcconfig */;
buildSettings = {
BITCODE_GENERATION_MODE = marker;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
"CODE_SIGN_IDENTITY[sdk=watchos*]" = "";
@@ -645,6 +676,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
GCC_PREFIX_HEADER = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-prefix.pch";
INFOPLIST_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
@@ -656,6 +688,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/TensorFlowLiteSwift/TensorFlowLiteSwift.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode-marker",
);
PRODUCT_MODULE_NAME = TensorFlowLite;
PRODUCT_NAME = TensorFlowLite;
SDKROOT = iphoneos;
@@ -663,6 +700,7 @@
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) ";
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
@@ -673,6 +711,7 @@
baseConfigurationReference = E6111D41D5230B9B2A5C44624B29EA42 /* Pods-OCR-SDK.release.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO;
BITCODE_GENERATION_MODE = bitcode;
CLANG_ENABLE_OBJC_WEAK = NO;
"CODE_SIGN_IDENTITY[sdk=appletvos*]" = "";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
@@ -682,6 +721,7 @@
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
ENABLE_BITCODE = YES;
INFOPLIST_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
@@ -692,6 +732,11 @@
);
MACH_O_TYPE = staticlib;
MODULEMAP_FILE = "Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK.modulemap";
ONLY_ACTIVE_ARCH = NO;
OTHER_CFLAGS = (
"$(inherited)",
"-fembed-bitcode",
);
OTHER_LDFLAGS = "";
OTHER_LIBTOOLFLAGS = "";
PODS_ROOT = "$(SRCROOT)";
@@ -701,6 +746,7 @@
SKIP_INSTALL = YES;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VALID_ARCHS = "arm64 armv7 armv7s i386 x86_64";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>3</integer>
</dict>
<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>1</integer>
</dict>
<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
</dict>
</dict>
</plist>
@@ -4,21 +4,23 @@
<dict>
<key>SchemeUserState</key>
<dict>
-<key>Pods-OCR-SDK.xcscheme_^#shared#^_</key>
+<key>Pods-OCR-SDK.xcscheme</key>
<dict>
-<key>orderHint</key>
-<integer>2</integer>
+<key>isShown</key>
+<false/>
</dict>
-<key>TensorFlowLiteC.xcscheme_^#shared#^_</key>
+<key>TensorFlowLiteC.xcscheme</key>
<dict>
-<key>orderHint</key>
-<integer>1</integer>
+<key>isShown</key>
+<false/>
</dict>
-<key>TensorFlowLiteSwift.xcscheme_^#shared#^_</key>
+<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
-<key>orderHint</key>
-<integer>3</integer>
+<key>isShown</key>
+<false/>
</dict>
</dict>
+<key>SuppressBuildableAutocreation</key>
+<dict/>
</dict>
</plist>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DE1F4D51AD94C30627575AEE202FD099"
BuildableName = "Pods_OCR_SDK.framework"
BlueprintName = "Pods-OCR-SDK"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForAnalyzing = "YES"
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "AC559E53E13B6FBEF4F5CC310A73AFE6"
BuildableName = "TensorFlowLiteC"
BlueprintName = "TensorFlowLiteC"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
buildConfiguration = "Debug">
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
buildConfiguration = "Debug"
allowLocationSimulation = "YES">
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1100"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "10418167F619D6DA72BADAD10F9EC02B"
BuildableName = "TensorFlowLite.framework"
BlueprintName = "TensorFlowLiteSwift"
ReferencedContainer = "container:Pods.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Pods-OCR-SDK.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteC.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
<key>TensorFlowLiteSwift.xcscheme</key>
<dict>
<key>isShown</key>
<false/>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict/>
</dict>
</plist>
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${PODS_ROOT}/Target Support Files/Pods-OCR-SDK/Pods-OCR-SDK-resources.sh
${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/GoogleMVFaceDetectorResources.bundle
\ No newline at end of file
#!/bin/sh
set -e
set -u
set -o pipefail
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
# If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
# resources to, so exit 0 (signalling the script phase was successful).
exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
case "${TARGETED_DEVICE_FAMILY:-}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
install_resource()
{
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
case $RESOURCE_PATH in
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
*.xcassets)
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
*)
echo "$RESOURCE_PATH" || true
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "${PODS_CONFIGURATION_BUILD_DIR}/GoogleMobileVision/GoogleMVFaceDetectorResources.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}*" ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
else
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
fi
fi
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1