Enguerrand ARMINJON
Committed by Enguerrand_ARMINJON_MAC_2

Merge branch 'juliansteenbakker:master' into feature/increase-camera-quality

@@ -2,7 +2,7 @@ group 'dev.steenbakker.mobile_scanner'
 version '1.0-SNAPSHOT'
 
 buildscript {
-    ext.kotlin_version = '1.9.10'
+    ext.kotlin_version = '1.7.22'
     repositories {
         google()
         mavenCentral()
@@ -172,12 +172,12 @@ class MobileScanner(
 
     // Return the best resolution for the actual device orientation.
    // By default camera set its resolution to width 480 and height 640 which is too low for ML KIT.
-    // If we return an higher resolution than device can handle, camera package take the most relavant one available.
+    // If we return a higher resolution than the device can handle, the camera package takes the most relevant one available.
     // Resolution set must take care of device orientation to preserve aspect ratio.
-    private fun getResolution(windowManager: WindowManager): Size {
+    private fun getResolution(windowManager: WindowManager, androidResolution: Size): Size {
         val rotation = windowManager.defaultDisplay.rotation
-        val widthMaxRes = 480 * 4;
-        val heightMaxRes = 640 * 4;
+        val widthMaxRes = androidResolution.width
+        val heightMaxRes = androidResolution.height
 
         val targetResolution = if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) {
             Size(widthMaxRes, heightMaxRes) // Portrait mode
@@ -201,7 +201,8 @@ class MobileScanner(
         torchStateCallback: TorchStateCallback,
         zoomScaleStateCallback: ZoomScaleStateCallback,
         mobileScannerStartedCallback: MobileScannerStartedCallback,
-        detectionTimeout: Long
+        detectionTimeout: Long,
+        androidResolution: Size?
     ) {
         this.detectionSpeed = detectionSpeed
         this.detectionTimeout = detectionTimeout
@@ -253,16 +254,19 @@ class MobileScanner(
             .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
         val displayManager = activity.applicationContext.getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
         val windowManager = activity.applicationContext.getSystemService(Context.WINDOW_SERVICE) as WindowManager
-        // Set initial resolution
-        analysisBuilder.setTargetResolution(getResolution(windowManager))
-        // Listen future orientation
-        displayManager.registerDisplayListener(object : DisplayManager.DisplayListener {
-            override fun onDisplayAdded(displayId: Int) {}
-            override fun onDisplayRemoved(displayId: Int) {}
-            override fun onDisplayChanged(displayId: Int) {
-                analysisBuilder.setTargetResolution(getResolution(windowManager))
-            }
-        }, null)
+
+        if (androidResolution != null) {
+            // Override initial resolution
+            analysisBuilder.setTargetResolution(getResolution(windowManager, androidResolution))
+            // Listen for future orientation changes to apply the custom resolution
+            displayManager.registerDisplayListener(object : DisplayManager.DisplayListener {
+                override fun onDisplayAdded(displayId: Int) {}
+                override fun onDisplayRemoved(displayId: Int) {}
+                override fun onDisplayChanged(displayId: Int) {
+                    analysisBuilder.setTargetResolution(getResolution(windowManager, androidResolution))
+                }
+            }, null)
+        }
 
         val analysis = analysisBuilder.build().apply { setAnalyzer(executor, captureOutput) }
 
@@ -2,6 +2,7 @@ package dev.steenbakker.mobile_scanner
 
 import android.app.Activity
 import android.net.Uri
+import android.util.Size
 import androidx.camera.core.CameraSelector
 import androidx.camera.core.ExperimentalGetImage
 import com.google.mlkit.vision.barcode.BarcodeScannerOptions
@@ -133,6 +134,12 @@ class MobileScannerHandler(
         val returnImage: Boolean = call.argument<Boolean>("returnImage") ?: false
         val speed: Int = call.argument<Int>("speed") ?: 1
         val timeout: Int = call.argument<Int>("timeout") ?: 250
+        val androidResolutionValueList: List<Int>? = call.argument<List<Int>>("androidResolution")
+        val androidResolution: Size? = if (androidResolutionValueList != null) {
+            Size(androidResolutionValueList[0], androidResolutionValueList[1])
+        } else {
+            null
+        }
 
         var barcodeScannerOptions: BarcodeScannerOptions? = null
         if (formats != null) {
@@ -164,7 +171,8 @@ class MobileScannerHandler(
                     "torchable" to it.hasFlashUnit
                 ))
             },
-            timeout.toLong())
+            timeout.toLong(),
+            androidResolution)
 
         } catch (e: AlreadyStarted) {
             result.error(
@@ -246,4 +254,4 @@ class MobileScannerHandler(
     private fun updateScanWindow(call: MethodCall) {
         mobileScanner!!.scanWindow = call.argument<List<Float>?>("rect")
     }
-}
+}
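Note: the Android handler above expects the custom resolution to arrive over the method channel as a two-element integer list ([width, height]) under the "androidResolution" key. A minimal, illustrative Dart sketch of the start arguments it parses; only the key names come from this diff, the helper itself is an assumption for illustration:

import 'package:flutter/painting.dart';

// Illustrative only: the method-channel payload shape that the handler's
// call.argument reads above expect. 'androidResolution' is [width, height].
Map<String, Object?> buildStartArguments(Size resolution) {
  return <String, Object?>{
    'returnImage': false,
    'speed': 1,
    'timeout': 250,
    'androidResolution': <int>[
      resolution.width.toInt(),
      resolution.height.toInt(),
    ],
  };
}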
@@ -213,18 +213,28 @@ public class MobileScanner: NSObject, AVCaptureVideoDataOutputSampleBufferDelega
             } catch {
                 print("Failed to reset zoom scale")
             }
-
-            let dimensions = CMVideoFormatDescriptionGetDimensions(self.device.activeFormat.formatDescription)
-
-            DispatchQueue.main.async {
-                completion(
-                    MobileScannerStartParameters(
-                        width: Double(dimensions.height),
-                        height: Double(dimensions.width),
-                        hasTorch: self.device.hasTorch,
-                        textureId: self.textureId
+
+            if let device = self.device {
+                let dimensions = CMVideoFormatDescriptionGetDimensions(
+                    device.activeFormat.formatDescription)
+                let hasTorch = device.hasTorch
+
+                DispatchQueue.main.async {
+                    completion(
+                        MobileScannerStartParameters(
+                            width: Double(dimensions.height),
+                            height: Double(dimensions.width),
+                            hasTorch: hasTorch,
+                            textureId: self.textureId ?? 0
+                        )
                     )
-                )
+                }
+
+                return
+            }
+
+            DispatchQueue.main.async {
+                completion(MobileScannerStartParameters())
             }
         }
     }
@@ -6,6 +6,7 @@ import 'package:mobile_scanner/src/mobile_scanner_controller.dart';
 import 'package:mobile_scanner/src/mobile_scanner_exception.dart';
 import 'package:mobile_scanner/src/objects/barcode_capture.dart';
 import 'package:mobile_scanner/src/objects/mobile_scanner_arguments.dart';
+import 'package:mobile_scanner/src/scan_window_calculation.dart';
 
 /// The function signature for the error builder.
 typedef MobileScannerErrorBuilder = Widget Function(
@@ -175,75 +176,6 @@ class _MobileScannerState extends State<MobileScanner>
     }
   }
 
-  /// the [scanWindow] rect will be relative and scaled to the [widgetSize] not the texture. so it is possible,
-  /// depending on the [fit], for the [scanWindow] to partially or not at all overlap the [textureSize]
-  ///
-  /// since when using a [BoxFit] the content will always be centered on its parent. we can convert the rect
-  /// to be relative to the texture.
-  ///
-  /// since the textures size and the actuall image (on the texture size) might not be the same, we also need to
-  /// calculate the scanWindow in terms of percentages of the texture, not pixels.
-  Rect calculateScanWindowRelativeToTextureInPercentage(
-    BoxFit fit,
-    Rect scanWindow,
-    Size textureSize,
-    Size widgetSize,
-  ) {
-    double fittedTextureWidth;
-    double fittedTextureHeight;
-
-    switch (fit) {
-      case BoxFit.contain:
-        final widthRatio = widgetSize.width / textureSize.width;
-        final heightRatio = widgetSize.height / textureSize.height;
-        final scale = widthRatio < heightRatio ? widthRatio : heightRatio;
-        fittedTextureWidth = textureSize.width * scale;
-        fittedTextureHeight = textureSize.height * scale;
-        break;
-
-      case BoxFit.cover:
-        final widthRatio = widgetSize.width / textureSize.width;
-        final heightRatio = widgetSize.height / textureSize.height;
-        final scale = widthRatio > heightRatio ? widthRatio : heightRatio;
-        fittedTextureWidth = textureSize.width * scale;
-        fittedTextureHeight = textureSize.height * scale;
-        break;
-
-      case BoxFit.fill:
-        fittedTextureWidth = widgetSize.width;
-        fittedTextureHeight = widgetSize.height;
-        break;
-
-      case BoxFit.fitHeight:
-        final ratio = widgetSize.height / textureSize.height;
-        fittedTextureWidth = textureSize.width * ratio;
-        fittedTextureHeight = widgetSize.height;
-        break;
-
-      case BoxFit.fitWidth:
-        final ratio = widgetSize.width / textureSize.width;
-        fittedTextureWidth = widgetSize.width;
-        fittedTextureHeight = textureSize.height * ratio;
-        break;
-
-      case BoxFit.none:
-      case BoxFit.scaleDown:
-        fittedTextureWidth = textureSize.width;
-        fittedTextureHeight = textureSize.height;
-        break;
-    }
-
-    final offsetX = (widgetSize.width - fittedTextureWidth) / 2;
-    final offsetY = (widgetSize.height - fittedTextureHeight) / 2;
-
-    final left = (scanWindow.left - offsetX) / fittedTextureWidth;
-    final top = (scanWindow.top - offsetY) / fittedTextureHeight;
-    final right = (scanWindow.right - offsetX) / fittedTextureWidth;
-    final bottom = (scanWindow.bottom - offsetY) / fittedTextureHeight;
-
-    return Rect.fromLTRB(left, top, right, bottom);
-  }
-
   Rect? scanWindow;
 
   @override
@@ -261,8 +193,8 @@ class _MobileScannerState extends State<MobileScanner>
         scanWindow = calculateScanWindowRelativeToTextureInPercentage(
           widget.fit,
           widget.scanWindow!,
-          value.size,
-          Size(constraints.maxWidth, constraints.maxHeight),
+          textureSize: value.size,
+          widgetSize: constraints.biggest,
         );
 
         _controller.updateScanWindow(scanWindow);
@@ -23,6 +23,7 @@ class MobileScannerController {
     )
         this.onPermissionSet,
     this.autoStart = true,
+    this.androidResolution,
   });
 
   /// Select which camera should be used.
@@ -58,9 +59,25 @@ class MobileScannerController {
   /// Automatically start the mobileScanner on initialization.
   final bool autoStart;
 
+  /// Can be used to override the default Android camera resolution.
+  /// The default camera resolution is 640x480.
+  /// Overriding the resolution can change the camera aspect ratio.
+  ///
+  /// Example: androidResolution: Size(1920, 2560);
+  ///
+  /// NOTE:
+  /// The values inside this Size will be converted to integers.
+  ///
+  /// The Android implementation of the package manages the orientation itself.
+  /// You don't need to update this parameter when the orientation changes.
+  ///
+  /// Android will take the closest available resolution if the overridden one can't be set.
+  final Size? androidResolution;
+
   /// Sets the barcode stream
   final StreamController<BarcodeCapture> _barcodesController =
       StreamController.broadcast();
+
   Stream<BarcodeCapture> get barcodes => _barcodesController.stream;
 
   static const MethodChannel _methodChannel =
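A minimal Dart usage sketch for the new parameter; the Size values are only an example, and the parameter is ignored on non-Android platforms (as the encoding in the next hunk shows):

import 'package:flutter/painting.dart';
import 'package:mobile_scanner/mobile_scanner.dart';

// Illustrative only: request a higher Android analysis resolution.
// The width and height are truncated to integers before being sent
// over the platform channel.
final controller = MobileScannerController(
  androidResolution: const Size(1920, 2560),
);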
@@ -133,6 +150,12 @@ class MobileScannerController {
       arguments['formats'] = formats!.map((e) => e.rawValue).toList();
     } else if (Platform.isAndroid) {
       arguments['formats'] = formats!.map((e) => e.index).toList();
+      if (androidResolution != null) {
+        arguments['androidResolution'] = <int>[
+          androidResolution!.width.toInt(),
+          androidResolution!.height.toInt(),
+        ];
+      }
     }
   }
   arguments['returnImage'] = returnImage;
@@ -384,6 +407,7 @@ class MobileScannerController {
           barcodes: [
             Barcode(
               rawValue: (data as Map)['payload'] as String?,
+              format: toFormat(data['symbology'] as int),
             ),
           ],
         ),
@@ -0,0 +1,97 @@
+import 'dart:math';
+
+import 'package:flutter/rendering.dart';
+
+/// Calculate the scan window rectangle relative to the texture size.
+///
+/// The [scanWindow] rectangle will be relative and scaled to [widgetSize], not [textureSize].
+/// Depending on the given [fit], the [scanWindow] can partially overlap the [textureSize],
+/// or not at all.
+///
+/// Due to using [BoxFit] the content will always be centered on its parent,
+/// which enables converting the rectangle to be relative to the texture.
+///
+/// Because the size of the actual texture and the size of the texture in widget-space
+/// can be different, calculate the size of the scan window in percentages,
+/// rather than pixels.
+///
+/// Returns a [Rect] that represents the position and size of the scan window in the texture.
+Rect calculateScanWindowRelativeToTextureInPercentage(
+  BoxFit fit,
+  Rect scanWindow, {
+  required Size textureSize,
+  required Size widgetSize,
+}) {
+  // Convert the texture size to a size in widget-space, with the box fit applied.
+  final fittedTextureSize = applyBoxFit(fit, textureSize, widgetSize);
+
+  // Get the correct scaling values depending on the given BoxFit mode
+  double sx = fittedTextureSize.destination.width / textureSize.width;
+  double sy = fittedTextureSize.destination.height / textureSize.height;
+
+  switch (fit) {
+    case BoxFit.fill:
+      // No-op, just use sx and sy.
+      break;
+    case BoxFit.contain:
+      final s = min(sx, sy);
+      sx = s;
+      sy = s;
+      break;
+    case BoxFit.cover:
+      final s = max(sx, sy);
+      sx = s;
+      sy = s;
+      break;
+    case BoxFit.fitWidth:
+      sy = sx;
+      break;
+    case BoxFit.fitHeight:
+      sx = sy;
+      break;
+    case BoxFit.none:
+      sx = 1.0;
+      sy = 1.0;
+      break;
+    case BoxFit.scaleDown:
+      final s = min(sx, sy);
+      sx = s;
+      sy = s;
+      break;
+  }
+
+  // Fit the texture size to the widget rectangle given by the scaling values above.
+  final textureWindow = Alignment.center.inscribe(
+    Size(textureSize.width * sx, textureSize.height * sy),
+    Rect.fromLTWH(0, 0, widgetSize.width, widgetSize.height),
+  );
+
+  // Transform the scan window from widget coordinates to texture coordinates.
+  final scanWindowInTexSpace = Rect.fromLTRB(
+    (1 / sx) * (scanWindow.left - textureWindow.left),
+    (1 / sy) * (scanWindow.top - textureWindow.top),
+    (1 / sx) * (scanWindow.right - textureWindow.left),
+    (1 / sy) * (scanWindow.bottom - textureWindow.top),
+  );
+
+  // Clip the scan window in texture coordinates with the texture bounds.
+  // This prevents percentages outside the range [0; 1].
+  final clippedScanWndInTexSpace = scanWindowInTexSpace.intersect(
+    Rect.fromLTWH(0, 0, textureSize.width, textureSize.height),
+  );
+
+  // Compute relative rectangle coordinates,
+  // with respect to the texture size, i.e. scan image.
+  final percentageLeft = clippedScanWndInTexSpace.left / textureSize.width;
+  final percentageTop = clippedScanWndInTexSpace.top / textureSize.height;
+  final percentageRight = clippedScanWndInTexSpace.right / textureSize.width;
+  final percentageBottom = clippedScanWndInTexSpace.bottom / textureSize.height;
+
+  // This rectangle can be used to cut out a rectangle of the scan image.
+  return Rect.fromLTRB(
+    percentageLeft,
+    percentageTop,
+    percentageRight,
+    percentageBottom,
+  );
+}
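A quick worked usage sketch of the function above. The input values mirror the BoxFit.fill case in the unit tests added later in this diff; the surrounding main() is illustrative only:

import 'package:flutter/painting.dart';
import 'package:mobile_scanner/src/scan_window_calculation.dart';

void main() {
  // A 480x640 texture shown in a 432x256 widget with BoxFit.fill,
  // with a scan window covering the centered middle half of the widget.
  final rect = calculateScanWindowRelativeToTextureInPercentage(
    BoxFit.fill,
    const Rect.fromLTWH(108, 64, 216, 128),
    textureSize: const Size(480, 640),
    widgetSize: const Size(432, 256),
  );
  // The result is the centered middle half of the texture:
  // Rect.fromLTRB(0.25, 0.25, 0.75, 0.75), see the BoxFit.fill test below.
  print(rect);
}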
@@ -2,6 +2,7 @@ import AVFoundation
 import FlutterMacOS
 import Vision
 import AppKit
+import VideoToolbox
 
 public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler, FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate {
 
@@ -17,8 +18,8 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     var captureSession: AVCaptureSession!
 
     // The selected camera
-    var device: AVCaptureDevice!
-
+    weak var device: AVCaptureDevice!
+
     // Image to be sent to the texture
     var latestBuffer: CVImageBuffer!
 
@@ -28,6 +29,8 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     var detectionSpeed: DetectionSpeed = DetectionSpeed.noDuplicates
 
     var timeoutSeconds: Double = 0
+
+    var symbologies:[VNBarcodeSymbology] = []
 
 
     // var analyzeMode: Int = 0
@@ -93,7 +96,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     }
 
     var nextScanTime = 0.0
-    var imagesCurrentlyBeingProcessed = 0
+    var imagesCurrentlyBeingProcessed = false
 
     // Gets called when a new image is added to the buffer
     public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
@@ -109,45 +112,53 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         registry.textureFrameAvailable(textureId)
 
         let currentTime = Date().timeIntervalSince1970
-        let eligibleForScan = currentTime > nextScanTime && imagesCurrentlyBeingProcessed == 0;
+        let eligibleForScan = currentTime > nextScanTime && !imagesCurrentlyBeingProcessed
         if ((detectionSpeed == DetectionSpeed.normal || detectionSpeed == DetectionSpeed.noDuplicates) && eligibleForScan || detectionSpeed == DetectionSpeed.unrestricted) {
             nextScanTime = currentTime + timeoutSeconds
-            imagesCurrentlyBeingProcessed += 1
-            let imageRequestHandler = VNImageRequestHandler(
-                cvPixelBuffer: latestBuffer,
-                orientation: .right)
-
+            imagesCurrentlyBeingProcessed = true
+            DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+                if(self!.latestBuffer == nil){
+                    return
+                }
+                var cgImage: CGImage?
+                VTCreateCGImageFromCVPixelBuffer(self!.latestBuffer, options: nil, imageOut: &cgImage)
+                let imageRequestHandler = VNImageRequestHandler(cgImage: cgImage!)
             do {
-                try imageRequestHandler.perform([VNDetectBarcodesRequest { [self] (request, error) in
-                    imagesCurrentlyBeingProcessed -= 1
-                    if error == nil {
-                        if let results = request.results as? [VNBarcodeObservation] {
+                let barcodeRequest:VNDetectBarcodesRequest = VNDetectBarcodesRequest(completionHandler: { [weak self] (request, error) in
+                    self?.imagesCurrentlyBeingProcessed = false
+                    if error == nil {
+                        if let results = request.results as? [VNBarcodeObservation] {
                             for barcode in results {
-                                if self.scanWindow != nil {
-                                    let match = self.isbarCodeInScanWindow(self.scanWindow!, barcode, self.latestBuffer)
+                                if self?.scanWindow != nil && cgImage != nil {
+                                    let match = self?.isBarCodeInScanWindow(self!.scanWindow!, barcode, cgImage!) ?? false
                                     if (!match) {
                                         continue
                                     }
                                 }
 
-                                let barcodeType = String(barcode.symbology.rawValue).replacingOccurrences(of: "VNBarcodeSymbology", with: "")
-                                let event: [String: Any?] = ["name": "barcodeMac", "data" : ["payload": barcode.payloadStringValue, "symbology": barcodeType]]
-                                self.sink?(event)
-
-                                // if barcodeType == "QR" {
-                                //     let image = CIImage(image: source)
-                                //     image?.cropping(to: barcode.boundingBox)
-                                //     self.qrCodeDescriptor(qrCode: barcode, qrCodeImage: image!)
-                                // }
-                            }
-                        }
-                    } else {
-                        print(error!.localizedDescription)
-                    }
-                }])
-            } catch {
-                print(error)
+                                DispatchQueue.main.async {
+                                    self?.sink?(["name": "barcodeMac", "data" : ["payload": barcode.payloadStringValue, "symbology": barcode.symbology.toInt as Any?]] as [String : Any])
+                                }
+//                                if barcodeType == "QR" {
+//                                    let image = CIImage(image: source)
+//                                    image?.cropping(to: barcode.boundingBox)
+//                                    self.qrCodeDescriptor(qrCode: barcode, qrCodeImage: image!)
+//                                }
+                            }
+                        }
+                    } else {
+                        self?.sink?(FlutterError(code: "MobileScanner", message: error?.localizedDescription, details: nil))
+                    }
+                })
+                if(self?.symbologies.isEmpty == false){
+                    // add the symbologies the user wishes to support
+                    barcodeRequest.symbologies = self!.symbologies
+                }
+                try imageRequestHandler.perform([barcodeRequest])
+            } catch let e {
+                self?.sink?(FlutterError(code: "MobileScanner", message: e.localizedDescription, details: nil))
             }
+            }
         }
     }
 
@@ -180,10 +191,10 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         let scanWindowData: Array? = argReader.floatArray(key: "rect")
 
         if (scanWindowData == nil) {
-            return
+            return
         }
 
-        let minX = scanWindowData![0]
+        let minX = scanWindowData![0]
         let minY = scanWindowData![1]
 
         let width = scanWindowData![2] - minX
@@ -191,9 +202,23 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
 
         scanWindow = CGRect(x: minX, y: minY, width: width, height: height)
     }
+
+    func isBarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CGImage) -> Bool {
 
-    func isbarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CVImageBuffer) -> Bool {
-        let size = CVImageBufferGetEncodedSize(inputImage)
+        let imageWidth = CGFloat(inputImage.width);
+        let imageHeight = CGFloat(inputImage.height);
+
+        let minX = scanWindow.minX * imageWidth
+        let minY = scanWindow.minY * imageHeight
+        let width = scanWindow.width * imageWidth
+        let height = scanWindow.height * imageHeight
+
+        let scaledScanWindow = CGRect(x: minX, y: minY, width: width, height: height)
+        return scaledScanWindow.contains(barcode.boundingBox)
+    }
+
+    func isBarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CVImageBuffer) -> Bool {
+        let size = CVImageBufferGetEncodedSize(inputImage)
 
         let imageWidth = size.width;
         let imageHeight = size.height;
@@ -210,23 +235,24 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     func start(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
         if (device != nil) {
             result(FlutterError(code: "MobileScanner",
-                                message: "Called start() while already started!",
-                                details: nil))
+                message: "Called start() while already started!",
+                details: nil))
             return
         }
-
+
         textureId = registry.register(self)
         captureSession = AVCaptureSession()
-
+
         let argReader = MapArgumentReader(call.arguments as? [String: Any])
-
-//        let ratio: Int = argReader.int(key: "ratio")
-        let torch: Bool = argReader.bool(key: "torch") ?? false
-        let facing: Int = argReader.int(key: "facing") ?? 1
-        let speed: Int = (call.arguments as! Dictionary<String, Any?>)["speed"] as? Int ?? 0
-        let timeoutMs: Int = (call.arguments as! Dictionary<String, Any?>)["timeout"] as? Int ?? 0
 
-        timeoutSeconds = Double(timeoutMs) * 1000.0
+        // let ratio: Int = argReader.int(key: "ratio")
+        let torch:Bool = argReader.bool(key: "torch") ?? false
+        let facing:Int = argReader.int(key: "facing") ?? 1
+        let speed:Int = argReader.int(key: "speed") ?? 0
+        let timeoutMs:Int = argReader.int(key: "timeout") ?? 0
+        symbologies = argReader.toSymbology()
+
+        timeoutSeconds = Double(timeoutMs) / 1000.0
         detectionSpeed = DetectionSpeed(rawValue: speed)!
 
         // Set the camera to use
@@ -241,8 +267,8 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
 
         if (device == nil) {
             result(FlutterError(code: "MobileScanner",
-                                message: "No camera found or failed to open camera!",
-                                details: nil))
+                message: "No camera found or failed to open camera!",
+                details: nil))
             return
         }
 
@@ -250,10 +276,10 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         if (device.hasTorch) {
             do {
                 try device.lockForConfiguration()
-                device.torchMode = torch ? .on : .off
-                device.unlockForConfiguration()
-            } catch {
-                result(FlutterError(code: error.localizedDescription, message: nil, details: nil))
+                device.torchMode = torch ? .on : .off
+                device.unlockForConfiguration()
+            } catch {
+                result(FlutterError(code: error.localizedDescription, message: nil, details: nil))
             }
         }
 
@@ -277,7 +303,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
         captureSession.addOutput(videoOutput)
         for connection in videoOutput.connections {
-//            connection.videoOrientation = .portrait
+            // connection.videoOrientation = .portrait
             if position == .front && connection.isVideoMirroringSupported {
                 connection.isVideoMirrored = true
             }
@@ -351,30 +377,117 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
 
 class MapArgumentReader {
 
-  let args: [String: Any]?
-
-  init(_ args: [String: Any]?) {
-    self.args = args
-  }
-
-  func string(key: String) -> String? {
-    return args?[key] as? String
-  }
-
-  func int(key: String) -> Int? {
-    return (args?[key] as? NSNumber)?.intValue
-  }
-
+    let args: [String: Any]?
+
+    init(_ args: [String: Any]?) {
+        self.args = args
+    }
+
+    func string(key: String) -> String? {
+        return args?[key] as? String
+    }
+
+    func int(key: String) -> Int? {
+        return (args?[key] as? NSNumber)?.intValue
+    }
+
   func bool(key: String) -> Bool? {
-    return (args?[key] as? NSNumber)?.boolValue
+        return (args?[key] as? NSNumber)?.boolValue
+    }
+
+    func stringArray(key: String) -> [String]? {
+        return args?[key] as? [String]
+    }
+
+    func toSymbology() -> [VNBarcodeSymbology] {
+        guard let syms:[Int] = args?["formats"] as? [Int] else {
+            return []
+        }
+        if(syms.contains(0)){
+            return []
+        }
+        var barcodeFormats:[VNBarcodeSymbology] = []
+        syms.forEach { id in
+            if let bc:VNBarcodeSymbology = VNBarcodeSymbology.fromInt(id) {
+                barcodeFormats.append(bc)
+            }
+        }
+        return barcodeFormats
   }
 
-  func stringArray(key: String) -> [String]? {
-    return args?[key] as? [String]
-  }
+    func floatArray(key: String) -> [CGFloat]? {
+        return args?[key] as? [CGFloat]
+    }
+
+}
 
-  func floatArray(key: String) -> [CGFloat]? {
-    return args?[key] as? [CGFloat]
-  }
+extension VNBarcodeSymbology {
 
+    static func fromInt(_ mapValue:Int) -> VNBarcodeSymbology? {
+        if #available(macOS 12.0, *) {
+            if(mapValue == 8){
+                return VNBarcodeSymbology.codabar
+            }
+        }
+        switch(mapValue){
+        case 1:
+            return VNBarcodeSymbology.code128
+        case 2:
+            return VNBarcodeSymbology.code39
+        case 4:
+            return VNBarcodeSymbology.code93
+        case 16:
+            return VNBarcodeSymbology.dataMatrix
+        case 32:
+            return VNBarcodeSymbology.ean13
+        case 64:
+            return VNBarcodeSymbology.ean8
+        case 128:
+            return VNBarcodeSymbology.itf14
+        case 256:
+            return VNBarcodeSymbology.qr
+        case 1024:
+            return VNBarcodeSymbology.upce
+        case 2048:
+            return VNBarcodeSymbology.pdf417
+        case 4096:
+            return VNBarcodeSymbology.aztec
+        default:
+            return nil
+        }
+    }
+
+    var toInt:Int? {
+        if #available(macOS 12.0, *) {
+            if(self == VNBarcodeSymbology.codabar){
+                return 8
+            }
+        }
+        switch(self){
+        case VNBarcodeSymbology.code128:
+            return 1
+        case VNBarcodeSymbology.code39:
+            return 2
+        case VNBarcodeSymbology.code93:
+            return 4
+        case VNBarcodeSymbology.dataMatrix:
+            return 16
+        case VNBarcodeSymbology.ean13:
+            return 32
+        case VNBarcodeSymbology.ean8:
+            return 64
+        case VNBarcodeSymbology.itf14:
+            return 128
+        case VNBarcodeSymbology.qr:
+            return 256
+        case VNBarcodeSymbology.upce:
+            return 1024
+        case VNBarcodeSymbology.pdf417:
+            return 2048
+        case VNBarcodeSymbology.aztec:
+            return 4096
+        default:
+            return -1;
+        }
+    }
 }
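The integers used by fromInt/toInt above are the values that the Dart side's toFormat(data['symbology'] as int) call earlier in this diff is expected to consume. A purely illustrative Dart sketch of that assumed correspondence (the map, its names, and main() are assumptions for illustration, not part of this diff):

// Illustrative only: assumed mapping between the macOS symbology integers
// above and barcode format names on the Dart side.
const assumedSymbologyNames = <int, String>{
  1: 'code128',
  2: 'code39',
  4: 'code93',
  8: 'codabar',
  16: 'dataMatrix',
  32: 'ean13',
  64: 'ean8',
  128: 'itf',
  256: 'qrCode',
  1024: 'upcE',
  2048: 'pdf417',
  4096: 'aztec',
};

void main() {
  // A macOS 'barcodeMac' event carrying 'symbology': 256 would be reported
  // as a QR code under this mapping.
  print(assumedSymbologyNames[256]); // qrCode
}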
@@ -0,0 +1,182 @@
+import 'package:flutter/painting.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:mobile_scanner/src/scan_window_calculation.dart';
+
+void main() {
+  group(
+    'Scan window relative to texture',
+    () {
+      group('Widget (landscape) smaller than texture (portrait)', () {
+        const textureSize = Size(480.0, 640.0);
+        const widgetSize = Size(432.0, 256.0);
+        final ctx = ScanWindowTestContext(
+          textureSize: textureSize,
+          widgetSize: widgetSize,
+          scanWindow: Rect.fromLTWH(
+            widgetSize.width / 4,
+            widgetSize.height / 4,
+            widgetSize.width / 2,
+            widgetSize.height / 2,
+          ),
+        );
+
+        test('wl tp: BoxFit.none', () {
+          ctx.testScanWindow(
+            BoxFit.none,
+            const Rect.fromLTRB(0.275, 0.4, 0.725, 0.6),
+          );
+        });
+
+        test('wl tp: BoxFit.fill', () {
+          ctx.testScanWindow(
+            BoxFit.fill,
+            const Rect.fromLTRB(0.25, 0.25, 0.75, 0.75),
+          );
+        });
+
+        test('wl tp: BoxFit.fitHeight', () {
+          ctx.testScanWindow(
+            BoxFit.fitHeight,
+            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
+          );
+        });
+
+        test('wl tp: BoxFit.fitWidth', () {
+          ctx.testScanWindow(
+            BoxFit.fitWidth,
+            const Rect.fromLTRB(
+              0.25,
+              0.38888888888888895,
+              0.75,
+              0.6111111111111112,
+            ),
+          );
+        });
+
+        test('wl tp: BoxFit.cover', () {
+          // equal to fitWidth
+          ctx.testScanWindow(
+            BoxFit.cover,
+            const Rect.fromLTRB(
+              0.25,
+              0.38888888888888895,
+              0.75,
+              0.6111111111111112,
+            ),
+          );
+        });
+
+        test('wl tp: BoxFit.contain', () {
+          // equal to fitHeight
+          ctx.testScanWindow(
+            BoxFit.contain,
+            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
+          );
+        });
+
+        test('wl tp: BoxFit.scaleDown', () {
+          // equal to fitHeight, contain
+          ctx.testScanWindow(
+            BoxFit.scaleDown,
+            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
+          );
+        });
+      });
+
+      group('Widget (landscape) smaller than texture and texture (landscape)',
+          () {
+        const textureSize = Size(640.0, 480.0);
+        const widgetSize = Size(320.0, 120.0);
+        final ctx = ScanWindowTestContext(
+          textureSize: textureSize,
+          widgetSize: widgetSize,
+          scanWindow: Rect.fromLTWH(
+            widgetSize.width / 4,
+            widgetSize.height / 4,
+            widgetSize.width / 2,
+            widgetSize.height / 2,
+          ),
+        );
+
+        test('wl tl: BoxFit.none', () {
+          ctx.testScanWindow(
+            BoxFit.none,
+            const Rect.fromLTRB(0.375, 0.4375, 0.625, 0.5625),
+          );
+        });
+
+        test('wl tl: BoxFit.fill', () {
+          ctx.testScanWindow(
+            BoxFit.fill,
+            const Rect.fromLTRB(0.25, 0.25, 0.75, 0.75),
+          );
+        });
+
+        test('wl tl: BoxFit.fitHeight', () {
+          ctx.testScanWindow(
+            BoxFit.fitHeight,
+            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
+          );
+        });
+
+        test('wl tl: BoxFit.fitWidth', () {
+          ctx.testScanWindow(
+            BoxFit.fitWidth,
+            const Rect.fromLTRB(0.25, 0.375, 0.75, 0.625),
+          );
+        });
+
+        test('wl tl: BoxFit.cover', () {
+          // equal to fitWidth
+          ctx.testScanWindow(
+            BoxFit.cover,
+            const Rect.fromLTRB(0.25, 0.375, 0.75, 0.625),
+          );
+        });
+
+        test('wl tl: BoxFit.contain', () {
+          // equal to fitHeight
+          ctx.testScanWindow(
+            BoxFit.contain,
+            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
+          );
+        });
+
+        test('wl tl: BoxFit.scaleDown', () {
+          // equal to fitHeight, contain
+          ctx.testScanWindow(
+            BoxFit.scaleDown,
+            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
+          );
+        });
+      });
+    },
+  );
+}
+
+class ScanWindowTestContext {
+  ScanWindowTestContext({
+    required this.textureSize,
+    required this.widgetSize,
+    required this.scanWindow,
+  });
+
+  final Size textureSize;
+  final Size widgetSize;
+  final Rect scanWindow;
+
+  void testScanWindow(BoxFit fit, Rect expected) {
+    final actual = calculateScanWindowRelativeToTextureInPercentage(
+      fit,
+      scanWindow,
+      textureSize: textureSize,
+      widgetSize: widgetSize,
+    );
+
+    // Don't use expect(actual, expected): Rect.toString() only shows one digit after the comma, which can be confusing.
+    expect(actual.left, expected.left);
+    expect(actual.top, expected.top);
+    expect(actual.right, expected.right);
+    expect(actual.bottom, expected.bottom);
+  }
+}