Committed by Enguerrand_ARMINJON_MAC_2
Merge branch 'juliansteenbakker:master' into feature/increase-camera-quality

Showing 9 changed files with 488 additions and 118 deletions
@@ -2,7 +2,7 @@ group 'dev.steenbakker.mobile_scanner'
 version '1.0-SNAPSHOT'
 
 buildscript {
-    ext.kotlin_version = '1.9.10'
+    ext.kotlin_version = '1.7.22'
     repositories {
         google()
         mavenCentral()
@@ -172,12 +172,12 @@ class MobileScanner(
 
     // Return the best resolution for the actual device orientation.
     // By default camera set its resolution to width 480 and height 640 which is too low for ML KIT.
-    // If we return an higher resolution than device can handle, camera package take the most relavant one available.
+    // If we return an higher resolution than device can handle, camera package take the most relevant one available.
     // Resolution set must take care of device orientation to preserve aspect ratio.
-    private fun getResolution(windowManager: WindowManager): Size {
+    private fun getResolution(windowManager: WindowManager, androidResolution: Size): Size {
         val rotation = windowManager.defaultDisplay.rotation
-        val widthMaxRes = 480 * 4;
-        val heightMaxRes = 640 * 4;
+        val widthMaxRes = androidResolution.width
+        val heightMaxRes = androidResolution.height
 
         val targetResolution = if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) {
             Size(widthMaxRes, heightMaxRes) // Portrait mode

@@ -201,7 +201,8 @@ class MobileScanner(
         torchStateCallback: TorchStateCallback,
         zoomScaleStateCallback: ZoomScaleStateCallback,
         mobileScannerStartedCallback: MobileScannerStartedCallback,
-        detectionTimeout: Long
+        detectionTimeout: Long,
+        androidResolution: Size?
     ) {
         this.detectionSpeed = detectionSpeed
         this.detectionTimeout = detectionTimeout

@@ -253,16 +254,19 @@ class MobileScanner(
             .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
         val displayManager = activity.applicationContext.getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
         val windowManager = activity.applicationContext.getSystemService(Context.WINDOW_SERVICE) as WindowManager
-        // Set initial resolution
-        analysisBuilder.setTargetResolution(getResolution(windowManager))
-        // Listen future orientation
+
+        if (androidResolution != null) {
+            // Override initial resolution
+            analysisBuilder.setTargetResolution(getResolution(windowManager, androidResolution))
+            // Listen future orientation change to apply the custom resolution
             displayManager.registerDisplayListener(object : DisplayManager.DisplayListener {
                 override fun onDisplayAdded(displayId: Int) {}
                 override fun onDisplayRemoved(displayId: Int) {}
                 override fun onDisplayChanged(displayId: Int) {
-                    analysisBuilder.setTargetResolution(getResolution(windowManager))
+                    analysisBuilder.setTargetResolution(getResolution(windowManager, androidResolution))
                 }
             }, null)
+        }
 
         val analysis = analysisBuilder.build().apply { setAnalyzer(executor, captureOutput) }
 
@@ -2,6 +2,7 @@ package dev.steenbakker.mobile_scanner
 
 import android.app.Activity
 import android.net.Uri
+import android.util.Size
 import androidx.camera.core.CameraSelector
 import androidx.camera.core.ExperimentalGetImage
 import com.google.mlkit.vision.barcode.BarcodeScannerOptions

@@ -133,6 +134,12 @@ class MobileScannerHandler(
         val returnImage: Boolean = call.argument<Boolean>("returnImage") ?: false
         val speed: Int = call.argument<Int>("speed") ?: 1
         val timeout: Int = call.argument<Int>("timeout") ?: 250
+        val androidResolutionValueList: List<Int>? = call.argument<List<Int>>("androidResolution")
+        val androidResolution: Size? = if (androidResolutionValueList != null) {
+            Size(androidResolutionValueList[0], androidResolutionValueList[1])
+        } else {
+            null
+        }
 
         var barcodeScannerOptions: BarcodeScannerOptions? = null
         if (formats != null) {

@@ -164,7 +171,8 @@ class MobileScannerHandler(
                     "torchable" to it.hasFlashUnit
                 ))
             },
-            timeout.toLong())
+            timeout.toLong(),
+            androidResolution)
 
         } catch (e: AlreadyStarted) {
             result.error(
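For context, a minimal sketch (not part of the diff) of the Dart-side argument map this handler decodes for the start call, using only the keys read above; the androidResolution entry is the optional new [width, height] integer pair.

// Sketch of the 'start' call arguments as the Android handler expects them.
final arguments = <String, Object?>{
  'returnImage': false,
  'speed': 1,                              // detection speed index
  'timeout': 250,                          // detection timeout in milliseconds
  'androidResolution': <int>[1920, 2560],  // optional resolution override
};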
@@ -214,18 +214,28 @@ public class MobileScanner: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
                 print("Failed to reset zoom scale")
             }
 
-            let dimensions = CMVideoFormatDescriptionGetDimensions(self.device.activeFormat.formatDescription)
+            if let device = self.device {
+                let dimensions = CMVideoFormatDescriptionGetDimensions(
+                    device.activeFormat.formatDescription)
+                let hasTorch = device.hasTorch
 
                 DispatchQueue.main.async {
                     completion(
                         MobileScannerStartParameters(
                             width: Double(dimensions.height),
                             height: Double(dimensions.width),
-                            hasTorch: self.device.hasTorch,
-                            textureId: self.textureId
+                            hasTorch: hasTorch,
+                            textureId: self.textureId ?? 0
                         )
                     )
                 }
+
+                return
+            }
+
+            DispatchQueue.main.async {
+                completion(MobileScannerStartParameters())
+            }
         }
     }
 
@@ -6,6 +6,7 @@ import 'package:mobile_scanner/src/mobile_scanner_controller.dart';
 import 'package:mobile_scanner/src/mobile_scanner_exception.dart';
 import 'package:mobile_scanner/src/objects/barcode_capture.dart';
 import 'package:mobile_scanner/src/objects/mobile_scanner_arguments.dart';
+import 'package:mobile_scanner/src/scan_window_calculation.dart';
 
 /// The function signature for the error builder.
 typedef MobileScannerErrorBuilder = Widget Function(

@@ -175,75 +176,6 @@ class _MobileScannerState extends State<MobileScanner>
     }
   }
 
-  /// the [scanWindow] rect will be relative and scaled to the [widgetSize] not the texture. so it is possible,
-  /// depending on the [fit], for the [scanWindow] to partially or not at all overlap the [textureSize]
-  ///
-  /// since when using a [BoxFit] the content will always be centered on its parent. we can convert the rect
-  /// to be relative to the texture.
-  ///
-  /// since the textures size and the actuall image (on the texture size) might not be the same, we also need to
-  /// calculate the scanWindow in terms of percentages of the texture, not pixels.
-  Rect calculateScanWindowRelativeToTextureInPercentage(
-    BoxFit fit,
-    Rect scanWindow,
-    Size textureSize,
-    Size widgetSize,
-  ) {
-    double fittedTextureWidth;
-    double fittedTextureHeight;
-
-    switch (fit) {
-      case BoxFit.contain:
-        final widthRatio = widgetSize.width / textureSize.width;
-        final heightRatio = widgetSize.height / textureSize.height;
-        final scale = widthRatio < heightRatio ? widthRatio : heightRatio;
-        fittedTextureWidth = textureSize.width * scale;
-        fittedTextureHeight = textureSize.height * scale;
-        break;
-
-      case BoxFit.cover:
-        final widthRatio = widgetSize.width / textureSize.width;
-        final heightRatio = widgetSize.height / textureSize.height;
-        final scale = widthRatio > heightRatio ? widthRatio : heightRatio;
-        fittedTextureWidth = textureSize.width * scale;
-        fittedTextureHeight = textureSize.height * scale;
-        break;
-
-      case BoxFit.fill:
-        fittedTextureWidth = widgetSize.width;
-        fittedTextureHeight = widgetSize.height;
-        break;
-
-      case BoxFit.fitHeight:
-        final ratio = widgetSize.height / textureSize.height;
-        fittedTextureWidth = textureSize.width * ratio;
-        fittedTextureHeight = widgetSize.height;
-        break;
-
-      case BoxFit.fitWidth:
-        final ratio = widgetSize.width / textureSize.width;
-        fittedTextureWidth = widgetSize.width;
-        fittedTextureHeight = textureSize.height * ratio;
-        break;
-
-      case BoxFit.none:
-      case BoxFit.scaleDown:
-        fittedTextureWidth = textureSize.width;
-        fittedTextureHeight = textureSize.height;
-        break;
-    }
-
-    final offsetX = (widgetSize.width - fittedTextureWidth) / 2;
-    final offsetY = (widgetSize.height - fittedTextureHeight) / 2;
-
-    final left = (scanWindow.left - offsetX) / fittedTextureWidth;
-    final top = (scanWindow.top - offsetY) / fittedTextureHeight;
-    final right = (scanWindow.right - offsetX) / fittedTextureWidth;
-    final bottom = (scanWindow.bottom - offsetY) / fittedTextureHeight;
-
-    return Rect.fromLTRB(left, top, right, bottom);
-  }
-
   Rect? scanWindow;
 
   @override

@@ -261,8 +193,8 @@ class _MobileScannerState extends State<MobileScanner>
           scanWindow = calculateScanWindowRelativeToTextureInPercentage(
             widget.fit,
             widget.scanWindow!,
-            value.size,
-            Size(constraints.maxWidth, constraints.maxHeight),
+            textureSize: value.size,
+            widgetSize: constraints.biggest,
           );
 
           _controller.updateScanWindow(scanWindow);
@@ -23,6 +23,7 @@ class MobileScannerController {
     )
         this.onPermissionSet,
     this.autoStart = true,
+    this.androidResolution,
   });
 
   /// Select which camera should be used.

@@ -58,9 +59,25 @@ class MobileScannerController {
   /// Automatically start the mobileScanner on initialization.
   final bool autoStart;
 
+  /// Can be used to override default Android camera resolution.
+  /// The default camera resolution is 640x480.
+  /// Overriding the resolution can change the camera aspect ratio.
+  ///
+  /// Example: androidResolution: Size(1920, 2560);
+  ///
+  /// NOTE:
+  /// Values inside this Size will be converted to integer type.
+  ///
+  /// The package Android implementation will manage itself the orientation.
+  /// You don't need to update this parameter if orientation change.
+  ///
+  /// Android will take the closest resolution available if the overrided one can't be set
+  final Size? androidResolution;
+
   /// Sets the barcode stream
   final StreamController<BarcodeCapture> _barcodesController =
       StreamController.broadcast();
+
   Stream<BarcodeCapture> get barcodes => _barcodesController.stream;
 
   static const MethodChannel _methodChannel =

@@ -133,6 +150,12 @@ class MobileScannerController {
       arguments['formats'] = formats!.map((e) => e.rawValue).toList();
     } else if (Platform.isAndroid) {
       arguments['formats'] = formats!.map((e) => e.index).toList();
+      if (androidResolution != null) {
+        arguments['androidResolution'] = <int>[
+          androidResolution!.width.toInt(),
+          androidResolution!.height.toInt(),
+        ];
+      }
     }
   }
   arguments['returnImage'] = returnImage;

@@ -384,6 +407,7 @@ class MobileScannerController {
           barcodes: [
             Barcode(
               rawValue: (data as Map)['payload'] as String?,
+              format: toFormat(data['symbology'] as int),
             ),
           ],
         ),
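Usage note (a minimal sketch, not part of the diff): with the new parameter, an app can request a higher Android analysis resolution when constructing the controller; the 1920x2560 value mirrors the example in the doc comment above, and the Size is converted to integers before being sent to the platform side.

import 'package:flutter/material.dart';
import 'package:mobile_scanner/mobile_scanner.dart';

// Sketch only: request a higher Android analysis resolution.
// The Size is sent to Android as an [int width, int height] pair.
final controller = MobileScannerController(
  androidResolution: const Size(1920, 2560),
);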
lib/src/scan_window_calculation.dart (new file, 0 → 100644)

import 'dart:math';

import 'package:flutter/rendering.dart';

/// Calculate the scan window rectangle relative to the texture size.
///
/// The [scanWindow] rectangle will be relative and scaled to [widgetSize], not [textureSize].
/// Depending on the given [fit], the [scanWindow] can partially overlap the [textureSize],
/// or not at all.
///
/// Due to using [BoxFit] the content will always be centered on its parent,
/// which enables converting the rectangle to be relative to the texture.
///
/// Because the size of the actual texture and the size of the texture in widget-space
/// can be different, calculate the size of the scan window in percentages,
/// rather than pixels.
///
/// Returns a [Rect] that represents the position and size of the scan window in the texture.
Rect calculateScanWindowRelativeToTextureInPercentage(
  BoxFit fit,
  Rect scanWindow, {
  required Size textureSize,
  required Size widgetSize,
}) {
  // Convert the texture size to a size in widget-space, with the box fit applied.
  final fittedTextureSize = applyBoxFit(fit, textureSize, widgetSize);

  // Get the correct scaling values depending on the given BoxFit mode
  double sx = fittedTextureSize.destination.width / textureSize.width;
  double sy = fittedTextureSize.destination.height / textureSize.height;

  switch (fit) {
    case BoxFit.fill:
      // No-op, just use sx and sy.
      break;
    case BoxFit.contain:
      final s = min(sx, sy);
      sx = s;
      sy = s;
      break;
    case BoxFit.cover:
      final s = max(sx, sy);
      sx = s;
      sy = s;
      break;
    case BoxFit.fitWidth:
      sy = sx;
      break;
    case BoxFit.fitHeight:
      sx = sy;
      break;
    case BoxFit.none:
      sx = 1.0;
      sy = 1.0;
      break;
    case BoxFit.scaleDown:
      final s = min(sx, sy);
      sx = s;
      sy = s;
      break;
  }

  // Fit the texture size to the widget rectangle given by the scaling values above.
  final textureWindow = Alignment.center.inscribe(
    Size(textureSize.width * sx, textureSize.height * sy),
    Rect.fromLTWH(0, 0, widgetSize.width, widgetSize.height),
  );

  // Transform the scan window from widget coordinates to texture coordinates.
  final scanWindowInTexSpace = Rect.fromLTRB(
    (1 / sx) * (scanWindow.left - textureWindow.left),
    (1 / sy) * (scanWindow.top - textureWindow.top),
    (1 / sx) * (scanWindow.right - textureWindow.left),
    (1 / sy) * (scanWindow.bottom - textureWindow.top),
  );

  // Clip the scan window in texture coordinates with the texture bounds.
  // This prevents percentages outside the range [0; 1].
  final clippedScanWndInTexSpace = scanWindowInTexSpace.intersect(
    Rect.fromLTWH(0, 0, textureSize.width, textureSize.height),
  );

  // Compute relative rectangle coordinates,
  // with respect to the texture size, i.e. scan image.
  final percentageLeft = clippedScanWndInTexSpace.left / textureSize.width;
  final percentageTop = clippedScanWndInTexSpace.top / textureSize.height;
  final percentageRight = clippedScanWndInTexSpace.right / textureSize.width;
  final percentageBottom = clippedScanWndInTexSpace.bottom / textureSize.height;

  // This rectangle can be used to cut out a rectangle of the scan image.
  return Rect.fromLTRB(
    percentageLeft,
    percentageTop,
    percentageRight,
    percentageBottom,
  );
}
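Illustrative usage of the extracted helper (a sketch with made-up sizes, not part of the diff): the returned Rect is expressed as fractions of the texture, so each coordinate lies in [0, 1].

import 'package:flutter/rendering.dart';
import 'package:mobile_scanner/src/scan_window_calculation.dart';

// A 200x200 scan window inside a 400x300 widget that shows a
// 640x480 camera texture rendered with BoxFit.cover.
final Rect relativeWindow = calculateScanWindowRelativeToTextureInPercentage(
  BoxFit.cover,
  const Rect.fromLTWH(100, 50, 200, 200),
  textureSize: const Size(640, 480),
  widgetSize: const Size(400, 300),
);
// relativeWindow is roughly Rect.fromLTRB(0.25, 0.17, 0.75, 0.83).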
@@ -2,6 +2,7 @@ import AVFoundation
 import FlutterMacOS
 import Vision
 import AppKit
+import VideoToolbox
 
 public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler, FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate {
 
@@ -17,7 +18,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     var captureSession: AVCaptureSession!
 
     // The selected camera
-    var device: AVCaptureDevice!
+    weak var device: AVCaptureDevice!
 
     // Image to be sent to the texture
     var latestBuffer: CVImageBuffer!
@@ -29,6 +30,8 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
 
     var timeoutSeconds: Double = 0
 
+    var symbologies:[VNBarcodeSymbology] = []
+
 
     // var analyzeMode: Int = 0
     var analyzing: Bool = false
@@ -93,7 +96,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
     }
 
     var nextScanTime = 0.0
-    var imagesCurrentlyBeingProcessed = 0
+    var imagesCurrentlyBeingProcessed = false
 
     // Gets called when a new image is added to the buffer
     public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
@@ -109,44 +112,52 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         registry.textureFrameAvailable(textureId)
 
         let currentTime = Date().timeIntervalSince1970
-        let eligibleForScan = currentTime > nextScanTime && imagesCurrentlyBeingProcessed == 0;
+        let eligibleForScan = currentTime > nextScanTime && !imagesCurrentlyBeingProcessed
         if ((detectionSpeed == DetectionSpeed.normal || detectionSpeed == DetectionSpeed.noDuplicates) && eligibleForScan || detectionSpeed == DetectionSpeed.unrestricted) {
             nextScanTime = currentTime + timeoutSeconds
-            imagesCurrentlyBeingProcessed += 1
-            let imageRequestHandler = VNImageRequestHandler(
-                cvPixelBuffer: latestBuffer,
-                orientation: .right)
-
+            imagesCurrentlyBeingProcessed = true
+            DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+                if(self!.latestBuffer == nil){
+                    return
+                }
+                var cgImage: CGImage?
+                VTCreateCGImageFromCVPixelBuffer(self!.latestBuffer, options: nil, imageOut: &cgImage)
+                let imageRequestHandler = VNImageRequestHandler(cgImage: cgImage!)
                 do {
-                    try imageRequestHandler.perform([VNDetectBarcodesRequest { [self] (request, error) in
-                        imagesCurrentlyBeingProcessed -= 1
+                    let barcodeRequest:VNDetectBarcodesRequest = VNDetectBarcodesRequest(completionHandler: { [weak self] (request, error) in
+                        self?.imagesCurrentlyBeingProcessed = false
                         if error == nil {
                             if let results = request.results as? [VNBarcodeObservation] {
                                 for barcode in results {
-                                    if self.scanWindow != nil {
-                                        let match = self.isbarCodeInScanWindow(self.scanWindow!, barcode, self.latestBuffer)
+                                    if self?.scanWindow != nil && cgImage != nil {
+                                        let match = self?.isBarCodeInScanWindow(self!.scanWindow!, barcode, cgImage!) ?? false
                                         if (!match) {
                                             continue
                                         }
                                     }
 
-                                    let barcodeType = String(barcode.symbology.rawValue).replacingOccurrences(of: "VNBarcodeSymbology", with: "")
-                                    let event: [String: Any?] = ["name": "barcodeMac", "data" : ["payload": barcode.payloadStringValue, "symbology": barcodeType]]
-                                    self.sink?(event)
-
-                                    // if barcodeType == "QR" {
-                                    //     let image = CIImage(image: source)
-                                    //     image?.cropping(to: barcode.boundingBox)
-                                    //     self.qrCodeDescriptor(qrCode: barcode, qrCodeImage: image!)
-                                    // }
+                                    DispatchQueue.main.async {
+                                        self?.sink?(["name": "barcodeMac", "data" : ["payload": barcode.payloadStringValue, "symbology": barcode.symbology.toInt as Any?]] as [String : Any])
+                                    }
+//                                    if barcodeType == "QR" {
+//                                        let image = CIImage(image: source)
+//                                        image?.cropping(to: barcode.boundingBox)
+//                                        self.qrCodeDescriptor(qrCode: barcode, qrCodeImage: image!)
+//                                    }
                                 }
                             }
                         } else {
-                            print(error!.localizedDescription)
+                            self?.sink?(FlutterError(code: "MobileScanner", message: error?.localizedDescription, details: nil))
+                        }
+                    })
+                    if(self?.symbologies.isEmpty == false){
+                        // add the symbologies the user wishes to support
+                        barcodeRequest.symbologies = self!.symbologies
+                    }
+                    try imageRequestHandler.perform([barcodeRequest])
+                } catch let e {
+                    self?.sink?(FlutterError(code: "MobileScanner", message: e.localizedDescription, details: nil))
                 }
-                    }])
-                } catch {
-                    print(error)
-                }
+            }
         }
     }
@@ -192,7 +203,21 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         scanWindow = CGRect(x: minX, y: minY, width: width, height: height)
     }
 
-    func isbarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CVImageBuffer) -> Bool {
+    func isBarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CGImage) -> Bool {
+
+        let imageWidth = CGFloat(inputImage.width);
+        let imageHeight = CGFloat(inputImage.height);
+
+        let minX = scanWindow.minX * imageWidth
+        let minY = scanWindow.minY * imageHeight
+        let width = scanWindow.width * imageWidth
+        let height = scanWindow.height * imageHeight
+
+        let scaledScanWindow = CGRect(x: minX, y: minY, width: width, height: height)
+        return scaledScanWindow.contains(barcode.boundingBox)
+    }
+
+    func isBarCodeInScanWindow(_ scanWindow: CGRect, _ barcode: VNBarcodeObservation, _ inputImage: CVImageBuffer) -> Bool {
         let size = CVImageBufferGetEncodedSize(inputImage)
 
         let imageWidth = size.width;
@@ -220,13 +245,14 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
 
         let argReader = MapArgumentReader(call.arguments as? [String: Any])
 
-//        let ratio: Int = argReader.int(key: "ratio")
-        let torch: Bool = argReader.bool(key: "torch") ?? false
-        let facing: Int = argReader.int(key: "facing") ?? 1
-        let speed: Int = (call.arguments as! Dictionary<String, Any?>)["speed"] as? Int ?? 0
-        let timeoutMs: Int = (call.arguments as! Dictionary<String, Any?>)["timeout"] as? Int ?? 0
+        // let ratio: Int = argReader.int(key: "ratio")
+        let torch:Bool = argReader.bool(key: "torch") ?? false
+        let facing:Int = argReader.int(key: "facing") ?? 1
+        let speed:Int = argReader.int(key: "speed") ?? 0
+        let timeoutMs:Int = argReader.int(key: "timeout") ?? 0
+        symbologies = argReader.toSymbology()
 
-        timeoutSeconds = Double(timeoutMs) * 1000.0
+        timeoutSeconds = Double(timeoutMs) / 1000.0
         detectionSpeed = DetectionSpeed(rawValue: speed)!
 
         // Set the camera to use
@@ -277,7 +303,7 @@ public class MobileScannerPlugin: NSObject, FlutterPlugin, FlutterStreamHandler,
         videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
         captureSession.addOutput(videoOutput)
         for connection in videoOutput.connections {
-//            connection.videoOrientation = .portrait
+            // connection.videoOrientation = .portrait
             if position == .front && connection.isVideoMirroringSupported {
                 connection.isVideoMirrored = true
             }
@@ -373,8 +399,95 @@ class MapArgumentReader {
         return args?[key] as? [String]
     }
 
+    func toSymbology() -> [VNBarcodeSymbology] {
+        guard let syms:[Int] = args?["formats"] as? [Int] else {
+            return []
+        }
+        if(syms.contains(0)){
+            return []
+        }
+        var barcodeFormats:[VNBarcodeSymbology] = []
+        syms.forEach { id in
+            if let bc:VNBarcodeSymbology = VNBarcodeSymbology.fromInt(id) {
+                barcodeFormats.append(bc)
+            }
+        }
+        return barcodeFormats
+    }
+
     func floatArray(key: String) -> [CGFloat]? {
         return args?[key] as? [CGFloat]
     }
 
 }
+
+extension VNBarcodeSymbology {
+
+    static func fromInt(_ mapValue:Int) -> VNBarcodeSymbology? {
+        if #available(macOS 12.0, *) {
+            if(mapValue == 8){
+                return VNBarcodeSymbology.codabar
+            }
+        }
+        switch(mapValue){
+        case 1:
+            return VNBarcodeSymbology.code128
+        case 2:
+            return VNBarcodeSymbology.code39
+        case 4:
+            return VNBarcodeSymbology.code93
+        case 16:
+            return VNBarcodeSymbology.dataMatrix
+        case 32:
+            return VNBarcodeSymbology.ean13
+        case 64:
+            return VNBarcodeSymbology.ean8
+        case 128:
+            return VNBarcodeSymbology.itf14
+        case 256:
+            return VNBarcodeSymbology.qr
+        case 1024:
+            return VNBarcodeSymbology.upce
+        case 2048:
+            return VNBarcodeSymbology.pdf417
+        case 4096:
+            return VNBarcodeSymbology.aztec
+        default:
+            return nil
+        }
+    }
+
+    var toInt:Int? {
+        if #available(macOS 12.0, *) {
+            if(self == VNBarcodeSymbology.codabar){
+                return 8
+            }
+        }
+        switch(self){
+        case VNBarcodeSymbology.code128:
+            return 1
+        case VNBarcodeSymbology.code39:
+            return 2
+        case VNBarcodeSymbology.code93:
+            return 4
+        case VNBarcodeSymbology.dataMatrix:
+            return 16
+        case VNBarcodeSymbology.ean13:
+            return 32
+        case VNBarcodeSymbology.ean8:
+            return 64
+        case VNBarcodeSymbology.itf14:
+            return 128
+        case VNBarcodeSymbology.qr:
+            return 256
+        case VNBarcodeSymbology.upce:
+            return 1024
+        case VNBarcodeSymbology.pdf417:
+            return 2048
+        case VNBarcodeSymbology.aztec:
+            return 4096
+        default:
+            return -1;
+        }
+    }
+}
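For reference, a hedged Dart-side sketch of where these integers come from: on iOS and macOS the controller sends the selected formats as raw values (see the controller diff above), which toSymbology() and fromInt map onto Vision symbologies. The enum names below assume the plugin's public BarcodeFormat API.

import 'package:mobile_scanner/mobile_scanner.dart';

// Sketch only: limit macOS scanning to QR and EAN-13; their raw values
// correspond to the 256 and 32 cases handled by fromInt above.
final controller = MobileScannerController(
  formats: [BarcodeFormat.qrCode, BarcodeFormat.ean13],
);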
test/scan_window_test.dart (new file, 0 → 100644)

import 'package:flutter/painting.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mobile_scanner/src/scan_window_calculation.dart';

void main() {
  group(
    'Scan window relative to texture',
    () {
      group('Widget (landscape) smaller than texture (portrait)', () {
        const textureSize = Size(480.0, 640.0);
        const widgetSize = Size(432.0, 256.0);
        final ctx = ScanWindowTestContext(
          textureSize: textureSize,
          widgetSize: widgetSize,
          scanWindow: Rect.fromLTWH(
            widgetSize.width / 4,
            widgetSize.height / 4,
            widgetSize.width / 2,
            widgetSize.height / 2,
          ),
        );

        test('wl tp: BoxFit.none', () {
          ctx.testScanWindow(
            BoxFit.none,
            const Rect.fromLTRB(0.275, 0.4, 0.725, 0.6),
          );
        });

        test('wl tp: BoxFit.fill', () {
          ctx.testScanWindow(
            BoxFit.fill,
            const Rect.fromLTRB(0.25, 0.25, 0.75, 0.75),
          );
        });

        test('wl tp: BoxFit.fitHeight', () {
          ctx.testScanWindow(
            BoxFit.fitHeight,
            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
          );
        });

        test('wl tp: BoxFit.fitWidth', () {
          ctx.testScanWindow(
            BoxFit.fitWidth,
            const Rect.fromLTRB(
              0.25,
              0.38888888888888895,
              0.75,
              0.6111111111111112,
            ),
          );
        });

        test('wl tp: BoxFit.cover', () {
          // equal to fitWidth
          ctx.testScanWindow(
            BoxFit.cover,
            const Rect.fromLTRB(
              0.25,
              0.38888888888888895,
              0.75,
              0.6111111111111112,
            ),
          );
        });

        test('wl tp: BoxFit.contain', () {
          // equal to fitHeigth
          ctx.testScanWindow(
            BoxFit.contain,
            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
          );
        });

        test('wl tp: BoxFit.scaleDown', () {
          // equal to fitHeigth, contain
          ctx.testScanWindow(
            BoxFit.scaleDown,
            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
          );
        });
      });

      group('Widget (landscape) smaller than texture and texture (landscape)',
          () {
        const textureSize = Size(640.0, 480.0);
        const widgetSize = Size(320.0, 120.0);
        final ctx = ScanWindowTestContext(
          textureSize: textureSize,
          widgetSize: widgetSize,
          scanWindow: Rect.fromLTWH(
            widgetSize.width / 4,
            widgetSize.height / 4,
            widgetSize.width / 2,
            widgetSize.height / 2,
          ),
        );

        test('wl tl: BoxFit.none', () {
          ctx.testScanWindow(
            BoxFit.none,
            const Rect.fromLTRB(0.375, 0.4375, 0.625, 0.5625),
          );
        });

        test('wl tl: BoxFit.fill', () {
          ctx.testScanWindow(
            BoxFit.fill,
            const Rect.fromLTRB(0.25, 0.25, 0.75, 0.75),
          );
        });

        test('wl tl: BoxFit.fitHeight', () {
          ctx.testScanWindow(
            BoxFit.fitHeight,
            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
          );
        });

        test('wl tl: BoxFit.fitWidth', () {
          ctx.testScanWindow(
            BoxFit.fitWidth,
            const Rect.fromLTRB(0.25, 0.375, 0.75, 0.625),
          );
        });

        test('wl tl: BoxFit.cover', () {
          // equal to fitWidth
          ctx.testScanWindow(
            BoxFit.cover,
            const Rect.fromLTRB(0.25, 0.375, 0.75, 0.625),
          );
        });

        test('wl tl: BoxFit.contain', () {
          // equal to fitHeigth
          ctx.testScanWindow(
            BoxFit.contain,
            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
          );
        });

        test('wl tl: BoxFit.scaleDown', () {
          // equal to fitHeigth, contain
          ctx.testScanWindow(
            BoxFit.scaleDown,
            const Rect.fromLTRB(0.0, 0.25, 1.0, 0.75),
          );
        });
      });
    },
  );
}

class ScanWindowTestContext {
  ScanWindowTestContext({
    required this.textureSize,
    required this.widgetSize,
    required this.scanWindow,
  });

  final Size textureSize;
  final Size widgetSize;
  final Rect scanWindow;

  void testScanWindow(BoxFit fit, Rect expected) {
    final actual = calculateScanWindowRelativeToTextureInPercentage(
      fit,
      scanWindow,
      textureSize: textureSize,
      widgetSize: widgetSize,
    );

    // don't use expect(actual, expected) because Rect.toString() only shows one digit after the comma which can be confusing
    expect(actual.left, expected.left);
    expect(actual.top, expected.top);
    expect(actual.right, expected.right);
    expect(actual.bottom, expected.bottom);
  }
}