Julian Steenbakker

imp: upgrade gradle and iOS improvements

@@ -6,7 +6,7 @@ buildscript {
     }
 
     dependencies {
-        classpath 'com.android.tools.build:gradle:7.1.0'
+        classpath 'com.android.tools.build:gradle:7.1.1'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    }
}
@@ -1,6 +1,6 @@
-#Tue Feb 08 10:35:11 CET 2022
+#Tue Feb 15 22:11:04 CET 2022
 distributionBase=GRADLE_USER_HOME
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
 distributionPath=wrapper/dists
 zipStorePath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
+//
+//  SwiftMobileScanner.swift
+//  mobile_scanner
+//
+//  Created by Julian Steenbakker on 15/02/2022.
+//
+
+import Foundation
@@ -5,40 +5,53 @@ import MLKitBarcodeScanning
 
 public class SwiftMobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler, FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate {
 
-    public static func register(with registrar: FlutterPluginRegistrar) {
-        let instance = SwiftMobileScannerPlugin(registrar.textures())
-
-        let method = FlutterMethodChannel(name: "dev.steenbakker.mobile_scanner/scanner/method", binaryMessenger: registrar.messenger())
-        registrar.addMethodCallDelegate(instance, channel: method)
-
-        let event = FlutterEventChannel(name: "dev.steenbakker.mobile_scanner/scanner/event", binaryMessenger: registrar.messenger())
-        event.setStreamHandler(instance)
-    }
-
     let registry: FlutterTextureRegistry
+
+    // Sink for publishing event changes
     var sink: FlutterEventSink!
+
+    // Texture id of the camera preview
     var textureId: Int64!
+
+    // Capture session of the camera
     var captureSession: AVCaptureSession!
+
+    // The selected camera
     var device: AVCaptureDevice!
+
+    // Image to be sent to the texture
     var latestBuffer: CVImageBuffer!
-    var analyzeMode: Int
-    var analyzing: Bool
+
+
+    var analyzeMode: Int = 0
+    var analyzing: Bool = false
+    var position = AVCaptureDevice.Position.back
+
+    public static func register(with registrar: FlutterPluginRegistrar) {
+        let instance = SwiftMobileScannerPlugin(registrar.textures())
+
+        let method = FlutterMethodChannel(name:
+            "dev.steenbakker.mobile_scanner/scanner/method", binaryMessenger: registrar.messenger())
+        let event = FlutterEventChannel(name:
+            "dev.steenbakker.mobile_scanner/scanner/event", binaryMessenger: registrar.messenger())
+        registrar.addMethodCallDelegate(instance, channel: method)
+        event.setStreamHandler(instance)
+    }
 
     init(_ registry: FlutterTextureRegistry) {
         self.registry = registry
-        analyzeMode = 0
-        analyzing = false
         super.init()
     }
 
+
     public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
         switch call.method {
         case "state":
-            stateNative(call, result)
+            checkPermission(call, result)
         case "request":
-            requestNative(call, result)
+            requestPermission(call, result)
         case "start":
-            startNative(call, result)
+            start(call, result)
         case "torch":
             torchNative(call, result)
         case "analyze":
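The relocated register(with:) above is the entry point that binds the dev.steenbakker.mobile_scanner/scanner/method and .../event channels to a plugin instance. As a minimal sketch (not part of this diff), a host app that registers its plugins by hand instead of through GeneratedPluginRegistrant would reach it roughly like this; the "SwiftMobileScannerPlugin" registrar key and the AppDelegate shape are assumptions:

import Flutter
import UIKit

@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
    override func application(
        _ application: UIApplication,
        didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
    ) -> Bool {
        // GeneratedPluginRegistrant.register(with: self) normally covers this;
        // the manual call only illustrates what register(with:) receives.
        if let registrar = self.registrar(forPlugin: "SwiftMobileScannerPlugin") {
            SwiftMobileScannerPlugin.register(with: registrar)
        }
        return super.application(application, didFinishLaunchingWithOptions: launchOptions)
    }
}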
@@ -50,16 +63,19 @@ public class SwiftMobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHan
         }
     }
 
+    // FlutterStreamHandler
     public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? {
         sink = events
         return nil
     }
 
+    // FlutterStreamHandler
     public func onCancel(withArguments arguments: Any?) -> FlutterError? {
         sink = nil
         return nil
     }
 
+    // FlutterTexture
     public func copyPixelBuffer() -> Unmanaged<CVPixelBuffer>? {
         if latestBuffer == nil {
             return nil
@@ -67,60 +83,60 @@ public class SwiftMobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHan
         return Unmanaged<CVPixelBuffer>.passRetained(latestBuffer)
     }
 
-    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
-
-        latestBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
-        registry.textureFrameAvailable(textureId)
-
-        switch analyzeMode {
-        case 1: // barcode
-            if analyzing {
-                break
-            }
-            analyzing = true
-            let buffer = CMSampleBufferGetImageBuffer(sampleBuffer)
-            let image = VisionImage(image: buffer!.image)
-            image.orientation = imageOrientation(
-                deviceOrientation: UIDevice.current.orientation,
-                defaultOrientation: .portrait
-            )
-
-            let scanner = BarcodeScanner.barcodeScanner()
-            scanner.process(image) { [self] barcodes, error in
-                if error == nil && barcodes != nil {
-                    for barcode in barcodes! {
-                        let event: [String: Any?] = ["name": "barcode", "data": barcode.data]
-                        sink?(event)
-                    }
-                }
-                analyzing = false
-            }
-        default: // none
-            break
-        }
-    }
+//    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+//
+//        latestBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
+//        registry.textureFrameAvailable(textureId)
+//
+//        switch analyzeMode {
+//        case 1: // barcode
+//            if analyzing {
+//                break
+//            }
+//            analyzing = true
+//            let buffer = CMSampleBufferGetImageBuffer(sampleBuffer)
+//            let image = VisionImage(image: buffer!.image)
+//            image.orientation = imageOrientation(
+//                deviceOrientation: UIDevice.current.orientation,
+//                defaultOrientation: .portrait
+//            )
+//
+//            let scanner = BarcodeScanner.barcodeScanner()
+//            scanner.process(image) { [self] barcodes, error in
+//                if error == nil && barcodes != nil {
+//                    for barcode in barcodes! {
+//                        let event: [String: Any?] = ["name": "barcode", "data": barcode.data]
+//                        sink?(event)
+//                    }
+//                }
+//                analyzing = false
+//            }
+//        default: // none
+//            break
+//        }
+//    }
 
-    func imageOrientation(
-        deviceOrientation: UIDeviceOrientation,
-        defaultOrientation: UIDeviceOrientation
-    ) -> UIImage.Orientation {
-        switch deviceOrientation {
-        case .portrait:
-            return position == .front ? .leftMirrored : .right
-        case .landscapeLeft:
-            return position == .front ? .downMirrored : .up
-        case .portraitUpsideDown:
-            return position == .front ? .rightMirrored : .left
-        case .landscapeRight:
-            return position == .front ? .upMirrored : .down
-        case .faceDown, .faceUp, .unknown:
-            return .up
-        @unknown default:
-            return imageOrientation(deviceOrientation: defaultOrientation, defaultOrientation: .portrait)
-        }
-    }
+//    func imageOrientation(
+//        deviceOrientation: UIDeviceOrientation,
+//        defaultOrientation: UIDeviceOrientation
+//    ) -> UIImage.Orientation {
+//        switch deviceOrientation {
+//        case .portrait:
+//            return position == .front ? .leftMirrored : .right
+//        case .landscapeLeft:
+//            return position == .front ? .downMirrored : .up
+//        case .portraitUpsideDown:
+//            return position == .front ? .rightMirrored : .left
+//        case .landscapeRight:
+//            return position == .front ? .upMirrored : .down
+//        case .faceDown, .faceUp, .unknown:
+//            return .up
+//        @unknown default:
+//            return imageOrientation(deviceOrientation: defaultOrientation, defaultOrientation: .portrait)
+//        }
+//    }
 
-    func stateNative(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
+    func checkPermission(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
         let status = AVCaptureDevice.authorizationStatus(for: .video)
         switch status {
         case .notDetermined:
@@ -132,20 +148,18 @@ public class SwiftMobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHan
         }
     }
 
-    func requestNative(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
+    func requestPermission(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
         AVCaptureDevice.requestAccess(for: .video, completionHandler: { result($0) })
     }
-
-    var position = AVCaptureDevice.Position.back
-
-    func startNative(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
+
+    func start(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
         textureId = registry.register(self)
         captureSession = AVCaptureSession()
 
         let argReader = MapArgumentReader(call.arguments as? [String: Any])
 
-        guard let targetWidth = argReader.int(key: "targetWidth"),
-            let targetHeight = argReader.int(key: "targetHeight"),
+        guard let ratio = argReader.int(key: "ratio"),
+            let torch = argReader.int(key: "torch"),
             let facing = argReader.int(key: "facing") else {
            result(FlutterError(code: "INVALID_ARGUMENT", message: "Missing a required argument", details: "Expecting targetWidth, targetHeight, formats, and optionally heartbeatTimeout"))
            return
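start(_:_:) above reads its ratio, torch, and facing arguments through a MapArgumentReader built from call.arguments, but the helper itself lies outside this hunk. Purely as an illustration of the int(key:) lookups used in that guard (a sketch, not the plugin's actual implementation), such a reader could look like:

import Foundation

// Illustrative sketch only: wraps the method-channel argument map and exposes
// the typed lookups the start(_:_:) guard depends on.
struct MapArgumentReader {
    private let args: [String: Any]?

    init(_ args: [String: Any]?) {
        self.args = args
    }

    // Returns the value for `key` when it is present and an Int, else nil,
    // so missing or mistyped arguments fall through to the error path.
    func int(key: String) -> Int? {
        return args?[key] as? Int
    }
}

With that shape, the guard above falls back to the INVALID_ARGUMENT FlutterError whenever ratio, torch, or facing is absent from the call.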