Ryan Duffy

Better memory footprint and use of a dispatch queue

Passing a CGImage to barcode detection instead of the raw pixel buffer significantly reduces the memory footprint. Detection now also runs on a background dispatch queue rather than on the main thread.
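In essence, each frame is copied into a CGImage with VideoToolbox and that image is handed to Vision on a background queue, instead of keeping the capture pixel buffer alive for the whole request. A minimal standalone sketch of that flow, assuming a hypothetical detectBarcodes helper (the plugin inlines this logic in captureOutput):

    import Foundation
    import CoreVideo
    import Vision
    import VideoToolbox

    // Illustrative helper (not part of the plugin): convert the frame to a CGImage,
    // then run barcode detection off the main thread and report back on it.
    func detectBarcodes(in pixelBuffer: CVPixelBuffer,
                        completion: @escaping ([VNBarcodeObservation]) -> Void) {
        DispatchQueue.global(qos: .userInitiated).async {
            // Copying into a CGImage means the capture buffer is not retained
            // for the lifetime of the Vision request.
            var cgImage: CGImage?
            VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
            guard let image = cgImage else { return }

            let request = VNDetectBarcodesRequest { request, _ in
                let barcodes = request.results as? [VNBarcodeObservation] ?? []
                // Hand results back on the main queue, mirroring the plugin's event sink.
                DispatchQueue.main.async { completion(barcodes) }
            }
            do {
                try VNImageRequestHandler(cgImage: image).perform([request])
            } catch {
                print(error.localizedDescription)
            }
        }
    }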
@@ -2,6 +2,7 @@ import AVFoundation
 import FlutterMacOS
 import Vision
 import AppKit
+import VideoToolbox

 public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler, FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate {
@@ -17,7 +18,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     var captureSession: AVCaptureSession!

     // The selected camera
-    var device: AVCaptureDevice!
+    weak var device: AVCaptureDevice!

     // Image to be sent to the texture
     var latestBuffer: CVImageBuffer!
@@ -95,7 +96,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     }

     var nextScanTime = 0.0
-    var imagesCurrentlyBeingProcessed = 0
+    var imagesCurrentlyBeingProcessed = false

     // Gets called when a new image is added to the buffer
     public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
@@ -111,45 +112,47 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         registry.textureFrameAvailable(textureId)

         let currentTime = Date().timeIntervalSince1970
-        let eligibleForScan = currentTime > nextScanTime && imagesCurrentlyBeingProcessed == 0;
+        let eligibleForScan = currentTime > nextScanTime && imagesCurrentlyBeingProcessed == false
         if ((detectionSpeed == DetectionSpeed.normal || detectionSpeed == DetectionSpeed.noDuplicates) && eligibleForScan || detectionSpeed == DetectionSpeed.unrestricted) {
             nextScanTime = currentTime + timeoutSeconds
-            imagesCurrentlyBeingProcessed += 1
-            let imageRequestHandler = VNImageRequestHandler(
-                cvPixelBuffer: latestBuffer,
-                orientation: .right)
+            imagesCurrentlyBeingProcessed = true
+            DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+                if(self!.latestBuffer == nil){
+                    return
+                }
+                var cgImage: CGImage?
+                VTCreateCGImageFromCVPixelBuffer(self!.latestBuffer, options: nil, imageOut: &cgImage)
+                let imageRequestHandler = VNImageRequestHandler(cgImage: cgImage!)
                 do {
-                    let barcodeRequest:VNDetectBarcodesRequest = VNDetectBarcodesRequest(completionHandler: { [self] (request, error) in
-                        imagesCurrentlyBeingProcessed -= 1
+                    let barcodeRequest:VNDetectBarcodesRequest = VNDetectBarcodesRequest(completionHandler: { [weak self] (request, error) in
+                        self?.imagesCurrentlyBeingProcessed = false
                         if error == nil {
                             if let results = request.results as? [VNBarcodeObservation] {
                                 for barcode in results {
-                                    if self.scanWindow != nil {
-                                        let match = self.isbarCodeInScanWindow(self.scanWindow!, barcode, self.latestBuffer)
-                                        if (!match) {
+                                    if self?.scanWindow != nil && cgImage != nil {
+                                        let match = self?.isbarCodeInScanWindow(self!.scanWindow!, barcode, cgImage!)
+                                        if (match == false) {
                                             continue
                                         }
                                     }

                                     let barcodeType = String(barcode.symbology.rawValue).replacingOccurrences(of: "VNBarcodeSymbology", with: "")
-                                    let event: [String: Any?] = ["name": "barcodeMac", "data" : ["payload": barcode.payloadStringValue, "symbology": barcode.symbology.toInt as Any?]]
-                                    self.sink?(event)
-//                                    if barcodeType == "QR" {
-//                                        let image = CIImage(image: source)
-//                                        image?.cropping(to: barcode.boundingBox)
-//                                        self.qrCodeDescriptor(qrCode: barcode, qrCodeImage: image!)
-//                                    }
+                                    DispatchQueue.main.async {
+                                        self?.sink?(["name": "barcodeMac", "data" : ["payload": barcode.payloadStringValue, "symbology": barcode.symbology.toInt as Any?]] as [String : Any])
+                                    }
+//                                    if barcodeType == "QR" {
+//                                        let image = CIImage(image: source)
+//                                        image?.cropping(to: barcode.boundingBox)
+//                                        self.qrCodeDescriptor(qrCode: barcode, qrCodeImage: image!)
+//                                    }
                                 }
                             }
                         } else {
                             print(error!.localizedDescription)
                         }
                     })
-                    if(symbologies.isEmpty == false){
+                    if(self?.symbologies.isEmpty == false){
                         // add the symbologies the user wishes to support
-                        barcodeRequest.symbologies = symbologies
+                        barcodeRequest.symbologies = self!.symbologies
                     }
                     try imageRequestHandler.perform([barcodeRequest])
                 } catch {
@@ -157,6 +160,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
                 }
+            }
         }
     }

     func checkPermission(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
         if #available(macOS 10.14, *) {
@@ -199,6 +203,20 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         scanWindow = CGRect(x: minX, y: minY, width: width, height: height)
     }

+    func isbarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CGImage) -> Bool {
+        let imageWidth = CGFloat(inputImage.width);
+        let imageHeight = CGFloat(inputImage.height);
+
+        let minX = scanWindow.minX * imageWidth
+        let minY = scanWindow.minY * imageHeight
+        let width = scanWindow.width * imageWidth
+        let height = scanWindow.height * imageHeight
+
+        let scaledScanWindow = CGRect(x: minX, y: minY, width: width, height: height)
+        return scaledScanWindow.contains(barcode.boundingBox)
+    }
+
     func isbarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CVImageBuffer) -> Bool {
         let size = CVImageBufferGetEncodedSize(inputImage)
@@ -227,11 +245,11 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         let argReader = MapArgumentReader(call.arguments as? [String: Any])

-        // let ratio: Int = argReader.int(key: "ratio")
-        let torch: Bool = argReader.bool(key: "torch") ?? false
-        let facing: Int = argReader.int(key: "facing") ?? 1
-        let speed: Int = (call.arguments as! Dictionary<String, Any?>)["speed"] as? Int ?? 0
-        let timeoutMs: Int = (call.arguments as! Dictionary<String, Any?>)["timeout"] as? Int ?? 0
+        // let ratio: Int = argReader.int(key: "ratio")
+        let torch:Bool = argReader.bool(key: "torch") ?? false
+        let facing:Int = argReader.int(key: "facing") ?? 1
+        let speed:Int = argReader.int(key: "speed") ?? 0
+        let timeoutMs:Int = argReader.int(key: "timeout") ?? 0
         symbologies = argReader.toSymbology()
         timeoutSeconds = Double(timeoutMs) / 1000.0
@@ -285,7 +303,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
         captureSession.addOutput(videoOutput)
         for connection in videoOutput.connections {
-            // connection.videoOrientation = .portrait
+            // connection.videoOrientation = .portrait
             if position == .front && connection.isVideoMirroringSupported {
                 connection.isVideoMirrored = true
             }
@@ -472,5 +490,4 @@ extension VNBarcodeSymbology {
             return -1;
         }
     }
 }