Julian Steenbakker
Committed by GitHub

Merge branch 'master' into dependabot/pub/lint-2.0.1

... ... @@ -45,12 +45,9 @@ Ensure that you granted camera permission in Xcode -> Signing & Capabilities:
Add this to `web/index.html`:
```html
<script src="https://cdn.jsdelivr.net/npm/jsqr@1.4.0/dist/jsQR.min.js"></script>
<script type="text/javascript" src="https://unpkg.com/@zxing/library@0.19.1"></script>
```
Web only supports QR codes for now.
Do you have experience with Flutter Web development? [Help me with migrating from jsQR to qr-scanner for full barcode support!](https://github.com/juliansteenbakker/mobile_scanner/issues/54)
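Once the script tag is in place, the scanner is used from Dart the same way as on the other platforms. A minimal sketch; the exact `onDetect` callback signature differs between package versions, so treat the shape below as an assumption rather than the definitive API:
```dart
import 'package:flutter/material.dart';
import 'package:mobile_scanner/mobile_scanner.dart';

class WebScannerPage extends StatelessWidget {
  const WebScannerPage({super.key});

  @override
  Widget build(BuildContext context) {
    return MobileScanner(
      // Assumed callback shape: a capture object carrying a list of barcodes,
      // mirroring the BarcodeCapture events the controller emits further below.
      onDetect: (capture) {
        for (final barcode in capture.barcodes) {
          debugPrint('Scanned: ${barcode.rawValue}');
        }
      },
    );
  }
}
```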
## Features Supported
| Features | Android | iOS | macOS | Web |
... ...
... ... @@ -28,8 +28,8 @@
<title>example</title>
<link rel="manifest" href="manifest.json">
<!-- <script src="https://cdn.jsdelivr.net/npm/qr-scanner@1.4.1/qr-scanner.min.js"></script>-->
<script src="https://cdn.jsdelivr.net/npm/jsqr@1.4.0/dist/jsQR.min.js"></script>
<script type="text/javascript" src="https://unpkg.com/@zxing/library@0.19.1"></script>
</head>
<body>
<!-- This script installs service_worker.js to provide PWA functionality to
... ...
library mobile_scanner_web;
export 'src/web/base.dart';
export 'src/web/jsqr.dart';
export 'src/web/zxing.dart';
... ...
... ... @@ -2,12 +2,10 @@ import 'dart:async';
import 'dart:html' as html;
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:mobile_scanner/mobile_scanner_web.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/web/jsqr.dart';
import 'package:mobile_scanner/src/web/media.dart';
/// This plugin is the web implementation of mobile_scanner.
/// It only supports QR codes.
... ... @@ -32,20 +30,14 @@ class MobileScannerWebPlugin {
// Controller to send events back to the framework
StreamController controller = StreamController.broadcast();
// The video stream. Will be initialized later to see which camera needs to be used.
html.MediaStream? _localStream;
html.VideoElement video = html.VideoElement();
// ID of the video feed
String viewID = 'WebScanner-${DateTime.now().millisecondsSinceEpoch}';
// Determine whether the device has a flash
bool hasFlash = false;
// Timer used to capture frames to be analyzed
Timer? _frameInterval;
static final html.DivElement vidDiv = html.DivElement();
html.DivElement vidDiv = html.DivElement();
static WebBarcodeReaderBase barCodeReader =
ZXingBarcodeReader(videoContainer: vidDiv);
StreamSubscription? _barCodeStreamSubscription;
/// Handle incoming messages
Future<dynamic> handleMethodCall(MethodCall call) async {
... ... @@ -67,20 +59,11 @@ class MobileScannerWebPlugin {
/// Can enable or disable the flash if available
Future<void> _torch(arguments) async {
if (hasFlash) {
final track = _localStream?.getVideoTracks();
await track!.first.applyConstraints({
'advanced': {'torch': arguments == 1}
});
} else {
controller.addError('Device has no flash');
}
barCodeReader.toggleTorch(enabled: arguments == 1);
}
/// Starts the video stream and the scanner
Future<Map> _start(Map arguments) async {
vidDiv.children = [video];
var cameraFacing = CameraFacing.front;
if (arguments.containsKey('facing')) {
cameraFacing = CameraFacing.values[arguments['facing'] as int];
... ... @@ -90,64 +73,45 @@ class MobileScannerWebPlugin {
// ignore: UNDEFINED_PREFIXED_NAME, avoid_dynamic_calls
ui.platformViewRegistry.registerViewFactory(
viewID,
(int id) => vidDiv
..style.width = '100%'
..style.height = '100%',
(int id) {
return vidDiv
..style.width = '100%'
..style.height = '100%';
},
);
// Check if stream is running
if (_localStream != null) {
if (barCodeReader.isStarted) {
return {
'ViewID': viewID,
'videoWidth': video.videoWidth,
'videoHeight': video.videoHeight
'videoWidth': barCodeReader.videoWidth,
'videoHeight': barCodeReader.videoHeight,
'torchable': barCodeReader.hasTorch,
};
}
try {
// Check if the browser supports multiple cameras and set if supported
final Map? capabilities =
html.window.navigator.mediaDevices?.getSupportedConstraints();
if (capabilities != null && capabilities['facingMode'] as bool) {
final constraints = {
'video': VideoOptions(
facingMode:
cameraFacing == CameraFacing.front ? 'user' : 'environment',
)
};
_localStream =
await html.window.navigator.mediaDevices?.getUserMedia(constraints);
} else {
_localStream = await html.window.navigator.mediaDevices
?.getUserMedia({'video': true});
}
video.srcObject = _localStream;
// TODO: fix flash light. See https://github.com/dart-lang/sdk/issues/48533
// final track = _localStream?.getVideoTracks();
// if (track != null) {
// final imageCapture = html.ImageCapture(track.first);
// final photoCapabilities = await imageCapture.getPhotoCapabilities();
// }
// required to tell iOS Safari we don't want fullscreen
video.setAttribute('playsinline', 'true');
await video.play();
// Then capture a frame to be analyzed every 200 milliseconds
_frameInterval =
Timer.periodic(const Duration(milliseconds: 200), (timer) {
_captureFrame();
await barCodeReader.start(
cameraFacing: cameraFacing,
);
_barCodeStreamSubscription =
barCodeReader.detectBarcodeContinuously().listen((code) {
if (code != null) {
controller.add({
'name': 'barcodeWeb',
'data': {
'rawValue': code.rawValue,
'rawBytes': code.rawBytes,
},
});
}
});
return {
'ViewID': viewID,
'videoWidth': video.videoWidth,
'videoHeight': video.videoHeight,
'torchable': hasFlash
'videoWidth': barCodeReader.videoWidth,
'videoHeight': barCodeReader.videoHeight,
'torchable': barCodeReader.hasTorch,
};
} catch (e) {
throw PlatformException(code: 'MobileScannerWeb', message: '$e');
... ... @@ -170,40 +134,8 @@ class MobileScannerWebPlugin {
/// Stops the video feed and analyzer
Future<void> cancel() async {
try {
// Stop the camera stream
_localStream?.getTracks().forEach((track) {
if (track.readyState == 'live') {
track.stop();
}
});
} catch (e) {
debugPrint('Failed to stop stream: $e');
}
video.srcObject = null;
_localStream = null;
_frameInterval?.cancel();
_frameInterval = null;
}
/// Captures a frame and analyzes it for QR codes
Future<dynamic> _captureFrame() async {
if (_localStream == null) return null;
final canvas =
html.CanvasElement(width: video.videoWidth, height: video.videoHeight);
final ctx = canvas.context2D;
ctx.drawImage(video, 0, 0);
final imgData = ctx.getImageData(0, 0, canvas.width!, canvas.height!);
final code = jsQR(imgData.data, canvas.width, canvas.height);
if (code != null) {
controller.add({
'name': 'barcodeWeb',
'data': code.data,
'binaryData': code.binaryData,
});
}
await barCodeReader.stop();
await _barCodeStreamSubscription?.cancel();
_barCodeStreamSubscription = null;
}
}
... ...
... ... @@ -329,11 +329,13 @@ class MobileScannerController {
);
break;
case 'barcodeWeb':
final barcode = data as Map?;
_barcodesController.add(
BarcodeCapture(
barcodes: [
Barcode(
rawValue: data as String?,
rawValue: barcode?['rawValue'] as String?,
rawBytes: barcode?['rawBytes'] as Uint8List?,
)
],
),
... ...
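On the app side, these `BarcodeCapture` events surface on the controller's barcode stream. A small sketch; the public getter name (`barcodes`) is an assumption and may differ between versions:
```dart
import 'package:flutter/foundation.dart';
import 'package:mobile_scanner/mobile_scanner.dart';

void listenForWebScans(MobileScannerController controller) {
  // Each BarcodeCapture built above carries the rawValue/rawBytes pair
  // that the web plugin packed into the 'barcodeWeb' event.
  controller.barcodes.listen((capture) {
    for (final barcode in capture.barcodes) {
      debugPrint('Scanned: ${barcode.rawValue}');
    }
  });
}
```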
import 'dart:html';
import 'package:flutter/material.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/media.dart';
abstract class WebBarcodeReaderBase {
/// Interval at which frames are captured to be analyzed
final Duration frameInterval;
final DivElement videoContainer;
const WebBarcodeReaderBase({
required this.videoContainer,
this.frameInterval = const Duration(milliseconds: 200),
});
bool get isStarted;
int get videoWidth;
int get videoHeight;
/// Starts streaming video
Future<void> start({
required CameraFacing cameraFacing,
});
/// Starts scanning QR codes or barcodes
Stream<Barcode?> detectBarcodeContinuously();
/// Stops streaming video
Future<void> stop();
/// Can enable or disable the flash if available
Future<void> toggleTorch({required bool enabled});
/// Determine whether device has flash
bool get hasTorch;
}
mixin InternalStreamCreation on WebBarcodeReaderBase {
/// The video stream.
/// Will be initialized later to see which camera needs to be used.
MediaStream? localMediaStream;
final VideoElement video = VideoElement();
@override
int get videoWidth => video.videoWidth;
@override
int get videoHeight => video.videoHeight;
Future<MediaStream?> initMediaStream(CameraFacing cameraFacing) async {
// Check if the browser supports multiple cameras and set if supported
final Map? capabilities =
window.navigator.mediaDevices?.getSupportedConstraints();
final Map<String, dynamic> constraints;
if (capabilities != null && capabilities['facingMode'] as bool) {
constraints = {
'video': VideoOptions(
facingMode:
cameraFacing == CameraFacing.front ? 'user' : 'environment',
)
};
} else {
constraints = {'video': true};
}
final stream =
await window.navigator.mediaDevices?.getUserMedia(constraints);
return stream;
}
void prepareVideoElement(VideoElement videoSource);
Future<void> attachStreamToVideo(
MediaStream stream,
VideoElement videoSource,
);
@override
Future<void> stop() async {
try {
// Stop the camera stream
localMediaStream?.getTracks().forEach((track) {
if (track.readyState == 'live') {
track.stop();
}
});
} catch (e) {
debugPrint('Failed to stop stream: $e');
}
video.srcObject = null;
localMediaStream = null;
videoContainer.children = [];
}
}
/// Mixin for libraries that don't have built-in torch support
mixin InternalTorchDetection on InternalStreamCreation {
@override
bool get hasTorch {
// TODO: fix flash light. See https://github.com/dart-lang/sdk/issues/48533
// final track = _localStream?.getVideoTracks();
// if (track != null) {
// final imageCapture = html.ImageCapture(track.first);
// final photoCapabilities = await imageCapture.getPhotoCapabilities();
// }
return false;
}
@override
Future<void> toggleTorch({required bool enabled}) async {
if (hasTorch) {
final track = localMediaStream?.getVideoTracks();
await track?.first.applyConstraints({
'advanced': [
{'torch': enabled}
]
});
}
}
}
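The split above is what makes the web backend pluggable: `WebBarcodeReaderBase` defines the contract, `InternalStreamCreation` owns the `MediaStream`/`VideoElement` lifecycle, and `InternalTorchDetection` supplies the fallback torch path. A hypothetical third engine (the class name and detection logic below are invented for illustration) only has to fill in the abstract members:
```dart
import 'dart:html';

import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/base.dart';

/// Hypothetical reader backed by some other JS detection library.
class MyBarcodeReader extends WebBarcodeReaderBase
    with InternalStreamCreation, InternalTorchDetection {
  MyBarcodeReader({required super.videoContainer});

  @override
  bool get isStarted => localMediaStream != null;

  @override
  Future<void> start({required CameraFacing cameraFacing}) async {
    videoContainer.children = [video];
    final stream = await initMediaStream(cameraFacing);
    prepareVideoElement(video);
    if (stream != null) {
      await attachStreamToVideo(stream, video);
    }
  }

  @override
  void prepareVideoElement(VideoElement videoSource) {
    // Required to tell iOS Safari we don't want fullscreen.
    videoSource.setAttribute('playsinline', 'true');
  }

  @override
  Future<void> attachStreamToVideo(
    MediaStream stream,
    VideoElement videoSource,
  ) async {
    localMediaStream = stream;
    videoSource.srcObject = stream;
    await videoSource.play();
  }

  @override
  Stream<Barcode?> detectBarcodeContinuously() async* {
    // Poll the (imaginary) detection engine once per frameInterval.
    while (isStarted) {
      yield await _detectOnce();
      await Future<void>.delayed(frameInterval);
    }
  }

  Future<Barcode?> _detectOnce() async {
    // Call into the JS detection library here (omitted, hypothetical).
    return null;
  }
}
```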
... ...
@JS()
library jsqr;
import 'dart:async';
import 'dart:html';
import 'dart:typed_data';
import 'package:js/js.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/base.dart';
@JS('jsQR')
external Code? jsQR(dynamic data, int? width, int? height);
... ... @@ -14,3 +19,72 @@ class Code {
external Uint8ClampedList get binaryData;
}
class JsQrCodeReader extends WebBarcodeReaderBase
with InternalStreamCreation, InternalTorchDetection {
JsQrCodeReader({required super.videoContainer});
@override
bool get isStarted => localMediaStream != null;
@override
Future<void> start({
required CameraFacing cameraFacing,
}) async {
videoContainer.children = [video];
final stream = await initMediaStream(cameraFacing);
prepareVideoElement(video);
if (stream != null) {
await attachStreamToVideo(stream, video);
}
}
@override
void prepareVideoElement(VideoElement videoSource) {
// required to tell iOS Safari we don't want fullscreen
videoSource.setAttribute('playsinline', 'true');
}
@override
Future<void> attachStreamToVideo(
MediaStream stream,
VideoElement videoSource,
) async {
localMediaStream = stream;
videoSource.srcObject = stream;
await videoSource.play();
}
@override
Stream<Barcode?> detectBarcodeContinuously() async* {
yield* Stream.periodic(frameInterval, (_) {
return _captureFrame(video);
}).asyncMap((event) async {
final code = await event;
if (code == null) {
return null;
}
return Barcode(
rawValue: code.data,
rawBytes: Uint8List.fromList(code.binaryData),
format: BarcodeFormat.qrCode,
);
});
}
/// Captures a frame and analyzes it for QR codes
Future<Code?> _captureFrame(VideoElement video) async {
if (localMediaStream == null) return null;
final canvas =
CanvasElement(width: video.videoWidth, height: video.videoHeight);
final ctx = canvas.context2D;
ctx.drawImage(video, 0, 0);
final imgData = ctx.getImageData(0, 0, canvas.width!, canvas.height!);
final code = jsQR(imgData.data, canvas.width, canvas.height);
return code;
}
}
... ...
@JS()
library qrscanner;
import 'package:js/js.dart';
@JS('QrScanner')
external String scanImage(dynamic data);
@JS()
class QrScanner {
external String get scanImage;
}
import 'dart:async';
import 'dart:html';
import 'dart:typed_data';
import 'package:js/js.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/base.dart';
@JS('Promise')
@staticInterop
class Promise<T> {}
@JS('ZXing.BrowserMultiFormatReader')
@staticInterop
class JsZXingBrowserMultiFormatReader {
/// https://github.com/zxing-js/library/blob/1e9ccb3b6b28d75b9eef866dba196d8937eb4449/src/browser/BrowserMultiFormatReader.ts#L11
external factory JsZXingBrowserMultiFormatReader(
dynamic hints,
int timeBetweenScansMillis,
);
}
@JS()
@anonymous
abstract class Result {
/// raw text encoded by the barcode
external String get text;
/// Returns raw bytes encoded by the barcode, if applicable, otherwise null
external Uint8ClampedList? get rawBytes;
/// The format of the barcode that was decoded
external int? format;
}
extension ResultExt on Result {
Barcode toBarcode() {
final rawBytes = this.rawBytes;
return Barcode(
rawValue: text,
rawBytes: rawBytes != null ? Uint8List.fromList(rawBytes) : null,
format: barcodeFormat,
);
}
/// https://github.com/zxing-js/library/blob/1e9ccb3b6b28d75b9eef866dba196d8937eb4449/src/core/BarcodeFormat.ts#L28
BarcodeFormat get barcodeFormat {
switch (format) {
case 0:
return BarcodeFormat.aztec;
case 1:
return BarcodeFormat.codebar;
case 2:
return BarcodeFormat.code39;
case 3:
return BarcodeFormat.code93;
case 4:
return BarcodeFormat.code128;
case 5:
return BarcodeFormat.dataMatrix;
case 6:
return BarcodeFormat.ean8;
case 7:
return BarcodeFormat.ean13;
case 8:
return BarcodeFormat.itf;
// case 9:
// return BarcodeFormat.maxicode;
case 10:
return BarcodeFormat.pdf417;
case 11:
return BarcodeFormat.qrCode;
// case 12:
// return BarcodeFormat.rss14;
// case 13:
// return BarcodeFormat.rssExp;
case 14:
return BarcodeFormat.upcA;
case 15:
return BarcodeFormat.upcE;
default:
return BarcodeFormat.unknown;
}
}
}
typedef BarcodeDetectionCallback = void Function(
Result? result,
dynamic error,
);
extension JsZXingBrowserMultiFormatReaderExt
on JsZXingBrowserMultiFormatReader {
external Promise<void> decodeFromVideoElementContinuously(
VideoElement source,
BarcodeDetectionCallback callbackFn,
);
/// Continuously decodes from video input
external void decodeContinuously(
VideoElement element,
BarcodeDetectionCallback callbackFn,
);
external Promise<void> decodeFromStream(
MediaStream stream,
VideoElement videoSource,
BarcodeDetectionCallback callbackFn,
);
external Promise<void> decodeFromConstraints(
dynamic constraints,
VideoElement videoSource,
BarcodeDetectionCallback callbackFn,
);
external void stopContinuousDecode();
external VideoElement prepareVideoElement(VideoElement videoSource);
/// Defines what the [videoElement] src will be.
external void addVideoSource(
VideoElement videoElement,
MediaStream stream,
);
external bool isVideoPlaying(VideoElement video);
external void reset();
/// The HTML video element, used to display the camera stream.
external VideoElement? videoElement;
/// The stream output from camera.
external MediaStream? stream;
}
class ZXingBarcodeReader extends WebBarcodeReaderBase
with InternalStreamCreation, InternalTorchDetection {
late final JsZXingBrowserMultiFormatReader _reader =
JsZXingBrowserMultiFormatReader(
null,
frameInterval.inMilliseconds,
);
ZXingBarcodeReader({required super.videoContainer});
@override
bool get isStarted => localMediaStream != null;
@override
Future<void> start({
required CameraFacing cameraFacing,
}) async {
videoContainer.children = [video];
final stream = await initMediaStream(cameraFacing);
prepareVideoElement(video);
if (stream != null) {
await attachStreamToVideo(stream, video);
}
}
@override
void prepareVideoElement(VideoElement videoSource) {
_reader.prepareVideoElement(videoSource);
}
@override
Future<void> attachStreamToVideo(
MediaStream stream,
VideoElement videoSource,
) async {
_reader.addVideoSource(videoSource, stream);
_reader.videoElement = videoSource;
_reader.stream = stream;
localMediaStream = stream;
await videoSource.play();
}
@override
Stream<Barcode?> detectBarcodeContinuously() {
final controller = StreamController<Barcode?>();
controller.onListen = () async {
_reader.decodeContinuously(
video,
allowInterop((result, error) {
if (result != null) {
controller.add(result.toBarcode());
}
}),
);
};
controller.onCancel = () {
_reader.stopContinuousDecode();
};
return controller.stream;
}
@override
Future<void> stop() async {
_reader.reset();
await super.stop();
}
}
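Put together, the reader lifecycle that `MobileScannerWebPlugin` drives looks roughly like this. A standalone sketch (the container element and the fixed delay are only for illustration), which also shows that switching back to `JsQrCodeReader` is just a matter of picking the other constructor, since both share the same base class:
```dart
import 'dart:html';

import 'package:flutter/foundation.dart';
import 'package:mobile_scanner/mobile_scanner_web.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';

Future<void> scanForHalfAMinute(DivElement container) async {
  // JsQrCodeReader(videoContainer: container) exposes the same interface.
  final reader = ZXingBarcodeReader(videoContainer: container);
  await reader.start(cameraFacing: CameraFacing.front);

  final subscription = reader.detectBarcodeContinuously().listen((barcode) {
    if (barcode != null) {
      debugPrint('Scanned ${barcode.format}: ${barcode.rawValue}');
    }
  });

  // Tear down in the same order the plugin's cancel() does.
  await Future<void>.delayed(const Duration(seconds: 30));
  await reader.stop();
  await subscription.cancel();
}
```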
... ...