Julian Steenbakker
Committed by GitHub

Merge branch 'master' into feature/zoom

... ... @@ -7,7 +7,7 @@
release-please:
runs-on: ubuntu-latest
steps:
- uses: GoogleCloudPlatform/release-please-action@v3.6.0
- uses: GoogleCloudPlatform/release-please-action@v3.6.1
with:
token: ${{ secrets.GITHUB_TOKEN }}
release-type: simple
... ...
... ... @@ -45,12 +45,9 @@ Ensure that you granted camera permission in XCode -> Signing & Capabilities:
Add this to `web/index.html`:
```html
<script src="https://cdn.jsdelivr.net/npm/jsqr@1.4.0/dist/jsQR.min.js"></script>
<script type="text/javascript" src="https://unpkg.com/@zxing/library@0.19.1"></script>
```
Web only supports QR codes for now.
Do you have experience with Flutter Web development? [Help me with migrating from jsQR to qr-scanner for full barcode support!](https://github.com/juliansteenbakker/mobile_scanner/issues/54)
## Features Supported
| Features | Android | iOS | macOS | Web |
... ...
... ... @@ -2,7 +2,7 @@ group 'dev.steenbakker.mobile_scanner'
version '1.0-SNAPSHOT'
buildscript {
ext.kotlin_version = '1.7.21'
ext.kotlin_version = '1.7.22'
repositories {
google()
mavenCentral()
... ...
buildscript {
ext.kotlin_version = '1.7.21'
ext.kotlin_version = '1.7.22'
repositories {
google()
mavenCentral()
... ...
... ... @@ -77,33 +77,41 @@ class _BarcodeScannerWithControllerState
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
IconButton(
color: Colors.white,
icon: ValueListenableBuilder(
valueListenable: controller.torchState,
builder: (context, state, child) {
if (state == null) {
return const Icon(
Icons.flash_off,
color: Colors.grey,
);
}
switch (state as TorchState) {
case TorchState.off:
return const Icon(
Icons.flash_off,
color: Colors.grey,
);
case TorchState.on:
return const Icon(
Icons.flash_on,
color: Colors.yellow,
);
}
},
),
iconSize: 32.0,
onPressed: () => controller.toggleTorch(),
ValueListenableBuilder(
valueListenable: controller.hasTorchState,
builder: (context, state, child) {
if (state != true) {
return const SizedBox.shrink();
}
return IconButton(
color: Colors.white,
icon: ValueListenableBuilder(
valueListenable: controller.torchState,
builder: (context, state, child) {
if (state == null) {
return const Icon(
Icons.flash_off,
color: Colors.grey,
);
}
switch (state as TorchState) {
case TorchState.off:
return const Icon(
Icons.flash_off,
color: Colors.grey,
);
case TorchState.on:
return const Icon(
Icons.flash_on,
color: Colors.yellow,
);
}
},
),
iconSize: 32.0,
onPressed: () => controller.toggleTorch(),
);
},
),
IconButton(
color: Colors.white,
... ...
... ... @@ -28,8 +28,8 @@
<title>example</title>
<link rel="manifest" href="manifest.json">
<!-- <script src="https://cdn.jsdelivr.net/npm/qr-scanner@1.4.1/qr-scanner.min.js"></script>-->
<script src="https://cdn.jsdelivr.net/npm/jsqr@1.4.0/dist/jsQR.min.js"></script>
<script type="text/javascript" src="https://unpkg.com/@zxing/library@0.19.1"></script>
</head>
<body>
<!-- This script installs service_worker.js to provide PWA functionality to
... ...
library mobile_scanner_web;
export 'src/web/base.dart';
export 'src/web/jsqr.dart';
export 'src/web/zxing.dart';
... ...
... ... @@ -2,12 +2,10 @@ import 'dart:async';
import 'dart:html' as html;
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:mobile_scanner/mobile_scanner_web.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/web/jsqr.dart';
import 'package:mobile_scanner/src/web/media.dart';
/// This plugin is the web implementation of mobile_scanner.
/// It only supports QR codes.
... ... @@ -32,20 +30,14 @@ class MobileScannerWebPlugin {
// Controller to send events back to the framework
StreamController controller = StreamController.broadcast();
// The video stream. Will be initialized later to see which camera needs to be used.
html.MediaStream? _localStream;
html.VideoElement video = html.VideoElement();
// ID of the video feed
String viewID = 'WebScanner-${DateTime.now().millisecondsSinceEpoch}';
// Determine whether the device has a flash
bool hasFlash = false;
// Timer used to capture frames to be analyzed
Timer? _frameInterval;
static final html.DivElement vidDiv = html.DivElement();
html.DivElement vidDiv = html.DivElement();
static WebBarcodeReaderBase barCodeReader =
ZXingBarcodeReader(videoContainer: vidDiv);
StreamSubscription? _barCodeStreamSubscription;
/// Handle incoming messages
Future<dynamic> handleMethodCall(MethodCall call) async {
... ... @@ -67,20 +59,11 @@ class MobileScannerWebPlugin {
/// Can enable or disable the flash if available
Future<void> _torch(arguments) async {
if (hasFlash) {
final track = _localStream?.getVideoTracks();
await track!.first.applyConstraints({
'advanced': {'torch': arguments == 1}
});
} else {
controller.addError('Device has no flash');
}
barCodeReader.toggleTorch(enabled: arguments == 1);
}
/// Starts the video stream and the scanner
Future<Map> _start(Map arguments) async {
vidDiv.children = [video];
var cameraFacing = CameraFacing.front;
if (arguments.containsKey('facing')) {
cameraFacing = CameraFacing.values[arguments['facing'] as int];
... ... @@ -90,64 +73,51 @@ class MobileScannerWebPlugin {
// ignore: UNDEFINED_PREFIXED_NAME, avoid_dynamic_calls
ui.platformViewRegistry.registerViewFactory(
viewID,
(int id) => vidDiv
..style.width = '100%'
..style.height = '100%',
(int id) {
return vidDiv
..style.width = '100%'
..style.height = '100%';
},
);
// Check if stream is running
if (_localStream != null) {
if (barCodeReader.isStarted) {
final hasTorch = await barCodeReader.hasTorch();
return {
'ViewID': viewID,
'videoWidth': video.videoWidth,
'videoHeight': video.videoHeight
'videoWidth': barCodeReader.videoWidth,
'videoHeight': barCodeReader.videoHeight,
'torchable': hasTorch,
};
}
try {
// Check if the browser supports the facingMode constraint and set it if supported
final Map? capabilities =
html.window.navigator.mediaDevices?.getSupportedConstraints();
if (capabilities != null && capabilities['facingMode'] as bool) {
final constraints = {
'video': VideoOptions(
facingMode:
cameraFacing == CameraFacing.front ? 'user' : 'environment',
)
};
_localStream =
await html.window.navigator.mediaDevices?.getUserMedia(constraints);
} else {
_localStream = await html.window.navigator.mediaDevices
?.getUserMedia({'video': true});
}
video.srcObject = _localStream;
// TODO: fix flash light. See https://github.com/dart-lang/sdk/issues/48533
// final track = _localStream?.getVideoTracks();
// if (track != null) {
// final imageCapture = html.ImageCapture(track.first);
// final photoCapabilities = await imageCapture.getPhotoCapabilities();
// }
// required to tell iOS safari we don't want fullscreen
video.setAttribute('playsinline', 'true');
await video.play();
// Then capture a frame to be analyzed every 200 milliseconds
_frameInterval =
Timer.periodic(const Duration(milliseconds: 200), (timer) {
_captureFrame();
await barCodeReader.start(
cameraFacing: cameraFacing,
);
_barCodeStreamSubscription =
barCodeReader.detectBarcodeContinuously().listen((code) {
if (code != null) {
controller.add({
'name': 'barcodeWeb',
'data': {
'rawValue': code.rawValue,
'rawBytes': code.rawBytes,
},
});
}
});
final hasTorch = await barCodeReader.hasTorch();
if (hasTorch && arguments.containsKey('torch')) {
barCodeReader.toggleTorch(enabled: arguments['torch'] as bool);
}
return {
'ViewID': viewID,
'videoWidth': video.videoWidth,
'videoHeight': video.videoHeight,
'torchable': hasFlash
'videoWidth': barCodeReader.videoWidth,
'videoHeight': barCodeReader.videoHeight,
'torchable': hasTorch,
};
} catch (e) {
throw PlatformException(code: 'MobileScannerWeb', message: '$e');
... ... @@ -170,40 +140,8 @@ class MobileScannerWebPlugin {
/// Stops the video feed and analyzer
Future<void> cancel() async {
try {
// Stop the camera stream
_localStream?.getTracks().forEach((track) {
if (track.readyState == 'live') {
track.stop();
}
});
} catch (e) {
debugPrint('Failed to stop stream: $e');
}
video.srcObject = null;
_localStream = null;
_frameInterval?.cancel();
_frameInterval = null;
}
/// Captures a frame and analyzes it for QR codes
Future<dynamic> _captureFrame() async {
if (_localStream == null) return null;
final canvas =
html.CanvasElement(width: video.videoWidth, height: video.videoHeight);
final ctx = canvas.context2D;
ctx.drawImage(video, 0, 0);
final imgData = ctx.getImageData(0, 0, canvas.width!, canvas.height!);
final code = jsQR(imgData.data, canvas.width, canvas.height);
if (code != null) {
controller.add({
'name': 'barcodeWeb',
'data': code.data,
'binaryData': code.binaryData,
});
}
barCodeReader.stop();
await _barCodeStreamSubscription?.cancel();
_barCodeStreamSubscription = null;
}
}
... ...
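For reference, here is a sketch of the event payload the web plugin now pushes for each detection (built in `_start` above and unpacked by the controller's `barcodeWeb` case below). The field names come from the diff; the concrete values and the surrounding map literal are invented for illustration.

```dart
import 'dart:typed_data';

// Illustrative only: the shape of the 'barcodeWeb' event after this change.
// Previously 'data' carried the raw string and 'binaryData' the bytes;
// now both travel inside the 'data' map.
final Map<String, dynamic> exampleBarcodeWebEvent = {
  'name': 'barcodeWeb',
  'data': {
    'rawValue': 'https://example.com', // decoded text (example value)
    'rawBytes': Uint8List.fromList([0x48, 0x69]), // raw bytes, may be null
  },
};
```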
... ... @@ -99,19 +99,21 @@ class MobileScannerController {
bool isStarting = false;
bool? _hasTorch;
/// A notifier that reports the availability of the torch (flash).
final ValueNotifier<bool?> hasTorchState = ValueNotifier(false);
/// Returns whether the device has a torch.
///
/// Throws an error if the controller is not initialized.
bool get hasTorch {
if (_hasTorch == null) {
final hasTorch = hasTorchState.value;
if (hasTorch == null) {
throw const MobileScannerException(
errorCode: MobileScannerErrorCode.controllerUninitialized,
);
}
return _hasTorch!;
return hasTorch;
}
/// Set the starting arguments for the camera
... ... @@ -210,8 +212,9 @@ class MobileScannerController {
);
}
_hasTorch = startResult['torchable'] as bool? ?? false;
if (_hasTorch! && torchEnabled) {
final hasTorch = startResult['torchable'] as bool? ?? false;
hasTorchState.value = hasTorch;
if (hasTorch && torchEnabled) {
torchState.value = TorchState.on;
}
... ... @@ -223,7 +226,7 @@ class MobileScannerController {
startResult['videoHeight'] as double? ?? 0,
)
: toSize(startResult['size'] as Map? ?? {}),
hasTorch: _hasTorch!,
hasTorch: hasTorch,
textureId: kIsWeb ? null : startResult['textureId'] as int?,
webId: kIsWeb ? startResult['ViewID'] as String? : null,
);
... ... @@ -244,7 +247,7 @@ class MobileScannerController {
///
/// Throws if the controller was not initialized.
Future<void> toggleTorch() async {
final hasTorch = _hasTorch;
final hasTorch = hasTorchState.value;
if (hasTorch == null) {
throw const MobileScannerException(
... ... @@ -342,11 +345,13 @@ class MobileScannerController {
);
break;
case 'barcodeWeb':
final barcode = data as Map?;
_barcodesController.add(
BarcodeCapture(
barcodes: [
Barcode(
rawValue: data as String?,
rawValue: barcode?['rawValue'] as String?,
rawBytes: barcode?['rawBytes'] as Uint8List?,
)
],
),
... ...
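A minimal, hedged sketch of consuming these captures on the app side; it assumes `_barcodesController` is exposed through a public `barcodes` stream, which is not shown in this diff.

```dart
import 'package:flutter/foundation.dart';
import 'package:mobile_scanner/mobile_scanner.dart';

void listenForBarcodes(MobileScannerController controller) {
  // Assumption: `barcodes` is the public stream backed by _barcodesController.
  controller.barcodes.listen((BarcodeCapture capture) {
    for (final Barcode barcode in capture.barcodes) {
      // On web, rawValue and rawBytes are filled from the 'barcodeWeb' event.
      debugPrint(
        'Scanned ${barcode.rawValue} (${barcode.rawBytes?.length ?? 0} bytes)',
      );
    }
  });
}
```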
... ... @@ -12,6 +12,14 @@ class MobileScannerException implements Exception {
/// The additional error details that came with the [errorCode].
final MobileScannerErrorDetails? errorDetails;
@override
String toString() {
if (errorDetails != null && errorDetails?.message != null) {
return "MobileScannerException: code ${errorCode.name}, message: ${errorDetails?.message}";
}
return "MobileScannerException: ${errorCode.name}";
}
}
/// The raw error details for a [MobileScannerException].
... ...
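As a hedged illustration of what the new `toString` changes at call sites (the `start()` call is just an example scenario, not taken from this diff):

```dart
import 'package:flutter/foundation.dart';
import 'package:mobile_scanner/mobile_scanner.dart';

Future<void> safeStart(MobileScannerController controller) async {
  try {
    await controller.start();
  } on MobileScannerException catch (e) {
    // With the override above this prints something like:
    //   MobileScannerException: code controllerUninitialized, message: ...
    // instead of the default "Instance of 'MobileScannerException'".
    debugPrint(e.toString());
  }
}
```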
import 'dart:html' as html;
import 'package:flutter/material.dart';
import 'package:js/js.dart';
import 'package:js/js_util.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/media.dart';
abstract class WebBarcodeReaderBase {
/// Interval between frames captured for analysis
final Duration frameInterval;
final html.DivElement videoContainer;
const WebBarcodeReaderBase({
required this.videoContainer,
this.frameInterval = const Duration(milliseconds: 200),
});
bool get isStarted;
int get videoWidth;
int get videoHeight;
/// Starts streaming video
Future<void> start({
required CameraFacing cameraFacing,
});
/// Starts scanning for barcodes (including QR codes)
Stream<Barcode?> detectBarcodeContinuously();
/// Stops streaming video
Future<void> stop();
/// Can enable or disable the flash if available
Future<void> toggleTorch({required bool enabled});
/// Determines whether the device has a flash (torch)
Future<bool> hasTorch();
}
mixin InternalStreamCreation on WebBarcodeReaderBase {
/// The video stream.
/// Initialized later, once the camera to use has been determined.
html.MediaStream? localMediaStream;
final html.VideoElement video = html.VideoElement();
@override
int get videoWidth => video.videoWidth;
@override
int get videoHeight => video.videoHeight;
Future<html.MediaStream?> initMediaStream(CameraFacing cameraFacing) async {
// Check if the browser supports the facingMode constraint and set it if supported
final Map? capabilities =
html.window.navigator.mediaDevices?.getSupportedConstraints();
final Map<String, dynamic> constraints;
if (capabilities != null && capabilities['facingMode'] as bool) {
constraints = {
'video': VideoOptions(
facingMode:
cameraFacing == CameraFacing.front ? 'user' : 'environment',
)
};
} else {
constraints = {'video': true};
}
final stream =
await html.window.navigator.mediaDevices?.getUserMedia(constraints);
return stream;
}
void prepareVideoElement(html.VideoElement videoSource);
Future<void> attachStreamToVideo(
html.MediaStream stream,
html.VideoElement videoSource,
);
@override
Future<void> stop() async {
try {
// Stop the camera stream
localMediaStream?.getTracks().forEach((track) {
if (track.readyState == 'live') {
track.stop();
}
});
} catch (e) {
debugPrint('Failed to stop stream: $e');
}
video.srcObject = null;
localMediaStream = null;
videoContainer.children = [];
}
}
/// Mixin for libraries that don't have built-in torch support
mixin InternalTorchDetection on InternalStreamCreation {
Future<List<String>> getSupportedTorchStates() async {
try {
final track = localMediaStream?.getVideoTracks();
if (track != null) {
final imageCapture = ImageCapture(track.first);
final photoCapabilities = await promiseToFuture<PhotoCapabilities>(
imageCapture.getPhotoCapabilities(),
);
final fillLightMode = photoCapabilities.fillLightMode;
if (fillLightMode != null) {
return fillLightMode;
}
}
} catch (e) {
// ImageCapture is not supported by some browsers:
// https://developer.mozilla.org/en-US/docs/Web/API/ImageCapture#browser_compatibility
}
return [];
}
@override
Future<bool> hasTorch() async {
return (await getSupportedTorchStates()).isNotEmpty;
}
@override
Future<void> toggleTorch({required bool enabled}) async {
final hasTorch = await this.hasTorch();
if (hasTorch) {
final track = localMediaStream?.getVideoTracks();
await track?.first.applyConstraints({
'advanced': [
{'torch': enabled}
]
});
}
}
}
@JS('Promise')
@staticInterop
class Promise<T> {}
@JS()
@anonymous
class PhotoCapabilities {
/// Returns an array of available fill light options. Options include auto, off, or flash.
external List<String>? get fillLightMode;
}
@JS('ImageCapture')
@staticInterop
class ImageCapture {
/// MediaStreamTrack
external factory ImageCapture(dynamic track);
}
extension ImageCaptureExt on ImageCapture {
external Promise<PhotoCapabilities> getPhotoCapabilities();
}
... ...
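To make the `WebBarcodeReaderBase` contract concrete, here is a minimal sketch of how a reader is driven, mirroring what `MobileScannerWebPlugin._start` does above; the standalone wiring is illustrative, since in the plugin the reader is a static field attached to the registered platform view.

```dart
import 'dart:html' as html;

import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/web/base.dart';
import 'package:mobile_scanner/src/web/zxing.dart';

Future<void> readerSketch() async {
  // In the plugin, this div is registered as a platform view instead.
  final html.DivElement container = html.DivElement();
  final WebBarcodeReaderBase reader =
      ZXingBarcodeReader(videoContainer: container);

  await reader.start(cameraFacing: CameraFacing.front);

  final sub = reader.detectBarcodeContinuously().listen((barcode) {
    if (barcode != null) {
      // ignore: avoid_print
      print('Detected ${barcode.rawValue}');
    }
  });

  // Tear down when scanning is no longer needed.
  await sub.cancel();
  await reader.stop();
}
```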
@JS()
library jsqr;
import 'dart:async';
import 'dart:html';
import 'dart:typed_data';
import 'package:js/js.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/base.dart';
@JS('jsQR')
external Code? jsQR(dynamic data, int? width, int? height);
... ... @@ -14,3 +19,72 @@ class Code {
external Uint8ClampedList get binaryData;
}
class JsQrCodeReader extends WebBarcodeReaderBase
with InternalStreamCreation, InternalTorchDetection {
JsQrCodeReader({required super.videoContainer});
@override
bool get isStarted => localMediaStream != null;
@override
Future<void> start({
required CameraFacing cameraFacing,
}) async {
videoContainer.children = [video];
final stream = await initMediaStream(cameraFacing);
prepareVideoElement(video);
if (stream != null) {
await attachStreamToVideo(stream, video);
}
}
@override
void prepareVideoElement(VideoElement videoSource) {
// Required to tell iOS Safari that we don't want fullscreen
videoSource.setAttribute('playsinline', 'true');
}
@override
Future<void> attachStreamToVideo(
MediaStream stream,
VideoElement videoSource,
) async {
localMediaStream = stream;
videoSource.srcObject = stream;
await videoSource.play();
}
@override
Stream<Barcode?> detectBarcodeContinuously() async* {
yield* Stream.periodic(frameInterval, (_) {
return _captureFrame(video);
}).asyncMap((event) async {
final code = await event;
if (code == null) {
return null;
}
return Barcode(
rawValue: code.data,
rawBytes: Uint8List.fromList(code.binaryData),
format: BarcodeFormat.qrCode,
);
});
}
/// Captures a frame and analyzes it for QR codes
Future<Code?> _captureFrame(VideoElement video) async {
if (localMediaStream == null) return null;
final canvas =
CanvasElement(width: video.videoWidth, height: video.videoHeight);
final ctx = canvas.context2D;
ctx.drawImage(video, 0, 0);
final imgData = ctx.getImageData(0, 0, canvas.width!, canvas.height!);
final code = jsQR(imgData.data, canvas.width, canvas.height);
return code;
}
}
... ...
@JS()
library qrscanner;
import 'package:js/js.dart';
@JS('QrScanner')
external String scanImage(dynamic data);
@JS()
class QrScanner {
external String get scanImage;
}
import 'dart:async';
import 'dart:html';
import 'dart:typed_data';
import 'package:js/js.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/base.dart';
@JS('ZXing.BrowserMultiFormatReader')
@staticInterop
class JsZXingBrowserMultiFormatReader {
/// https://github.com/zxing-js/library/blob/1e9ccb3b6b28d75b9eef866dba196d8937eb4449/src/browser/BrowserMultiFormatReader.ts#L11
external factory JsZXingBrowserMultiFormatReader(
dynamic hints,
int timeBetweenScansMillis,
);
}
@JS()
@anonymous
abstract class Result {
/// The raw text encoded by the barcode
external String get text;
/// Returns raw bytes encoded by the barcode, if applicable, otherwise null
external Uint8ClampedList? get rawBytes;
/// The format of the barcode that was decoded
external int? format;
}
extension ResultExt on Result {
Barcode toBarcode() {
final rawBytes = this.rawBytes;
return Barcode(
rawValue: text,
rawBytes: rawBytes != null ? Uint8List.fromList(rawBytes) : null,
format: barcodeFormat,
);
}
/// https://github.com/zxing-js/library/blob/1e9ccb3b6b28d75b9eef866dba196d8937eb4449/src/core/BarcodeFormat.ts#L28
BarcodeFormat get barcodeFormat {
switch (format) {
case 1:
return BarcodeFormat.aztec;
case 2:
return BarcodeFormat.codebar;
case 3:
return BarcodeFormat.code39;
case 4:
return BarcodeFormat.code128;
case 5:
return BarcodeFormat.dataMatrix;
case 6:
return BarcodeFormat.ean8;
case 7:
return BarcodeFormat.ean13;
case 8:
return BarcodeFormat.itf;
// case 9:
// return BarcodeFormat.maxicode;
case 10:
return BarcodeFormat.pdf417;
case 11:
return BarcodeFormat.qrCode;
// case 12:
// return BarcodeFormat.rss14;
// case 13:
// return BarcodeFormat.rssExp;
case 14:
return BarcodeFormat.upcA;
case 15:
return BarcodeFormat.upcE;
default:
return BarcodeFormat.unknown;
}
}
}
typedef BarcodeDetectionCallback = void Function(
Result? result,
dynamic error,
);
extension JsZXingBrowserMultiFormatReaderExt
on JsZXingBrowserMultiFormatReader {
external Promise<void> decodeFromVideoElementContinuously(
VideoElement source,
BarcodeDetectionCallback callbackFn,
);
/// Continuously decodes from video input
external void decodeContinuously(
VideoElement element,
BarcodeDetectionCallback callbackFn,
);
external Promise<void> decodeFromStream(
MediaStream stream,
VideoElement videoSource,
BarcodeDetectionCallback callbackFn,
);
external Promise<void> decodeFromConstraints(
dynamic constraints,
VideoElement videoSource,
BarcodeDetectionCallback callbackFn,
);
external void stopContinuousDecode();
external VideoElement prepareVideoElement(VideoElement videoSource);
/// Defines what the [videoElement] src will be.
external void addVideoSource(
VideoElement videoElement,
MediaStream stream,
);
external bool isVideoPlaying(VideoElement video);
external void reset();
/// The HTML video element, used to display the camera stream.
external VideoElement? videoElement;
/// The stream output from camera.
external MediaStream? stream;
}
class ZXingBarcodeReader extends WebBarcodeReaderBase
with InternalStreamCreation, InternalTorchDetection {
late final JsZXingBrowserMultiFormatReader _reader =
JsZXingBrowserMultiFormatReader(
null,
frameInterval.inMilliseconds,
);
ZXingBarcodeReader({required super.videoContainer});
@override
bool get isStarted => localMediaStream != null;
@override
Future<void> start({
required CameraFacing cameraFacing,
}) async {
videoContainer.children = [video];
final stream = await initMediaStream(cameraFacing);
prepareVideoElement(video);
if (stream != null) {
await attachStreamToVideo(stream, video);
}
}
@override
void prepareVideoElement(VideoElement videoSource) {
_reader.prepareVideoElement(videoSource);
}
@override
Future<void> attachStreamToVideo(
MediaStream stream,
VideoElement videoSource,
) async {
_reader.addVideoSource(videoSource, stream);
_reader.videoElement = videoSource;
_reader.stream = stream;
localMediaStream = stream;
await videoSource.play();
}
@override
Stream<Barcode?> detectBarcodeContinuously() {
final controller = StreamController<Barcode?>();
controller.onListen = () async {
_reader.decodeContinuously(
video,
allowInterop((result, error) {
if (result != null) {
controller.add(result.toBarcode());
}
}),
);
};
controller.onCancel = () {
_reader.stopContinuousDecode();
};
return controller.stream;
}
@override
Future<void> stop() async {
_reader.reset();
await super.stop();
}
}
... ...
... ... @@ -18,7 +18,7 @@ dependencies:
dev_dependencies:
flutter_test:
sdk: flutter
lint: ^1.10.0
lint: ">=1.10.0 <3.0.0"
flutter:
plugin:
... ...