// base.dart
import 'dart:html' as html;
import 'package:flutter/material.dart';
import 'package:js/js.dart';
import 'package:js/js_util.dart';
import 'package:mobile_scanner/src/enums/camera_facing.dart';
import 'package:mobile_scanner/src/objects/barcode.dart';
import 'package:mobile_scanner/src/web/media.dart';

abstract class WebBarcodeReaderBase {
  /// Interval at which frames are captured for analysis.
  final Duration frameInterval;
  final html.DivElement videoContainer;

  const WebBarcodeReaderBase({
    required this.videoContainer,
    this.frameInterval = const Duration(milliseconds: 200),
  });

  bool get isStarted;

  int get videoWidth;
  int get videoHeight;

  /// Starts streaming video
  Future<void> start({
    required CameraFacing cameraFacing,
  });

  /// Starts scanning QR codes or barcodes
  Stream<Barcode?> detectBarcodeContinuously();

  /// Stops streaming video
  Future<void> stop();

  /// Enables or disables the flash (torch), if available
  Future<void> toggleTorch({required bool enabled});

  /// Determines whether the device has a flash (torch)
  Future<bool> hasTorch();
}
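
// ---------------------------------------------------------------------------
// Usage sketch (not part of the library): how a caller might drive a concrete
// WebBarcodeReaderBase implementation. The function name and the way results
// are consumed here are illustrative assumptions only.
// ---------------------------------------------------------------------------
Future<void> exampleScanSession(WebBarcodeReaderBase reader) async {
  // Open the camera stream, preferring the back camera.
  await reader.start(cameraFacing: CameraFacing.back);

  // Listen for decoded barcodes; a frame is analyzed every [frameInterval].
  final subscription = reader.detectBarcodeContinuously().listen((barcode) {
    if (barcode != null) {
      debugPrint('Barcode detected: $barcode');
    }
  });

  // Turn the torch on when the device reports one.
  if (await reader.hasTorch()) {
    await reader.toggleTorch(enabled: true);
  }

  // Later: tear the session down again.
  await subscription.cancel();
  await reader.stop();
}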

mixin InternalStreamCreation on WebBarcodeReaderBase {
  /// The video stream.
  /// Initialized lazily, once it is known which camera should be used.
  html.MediaStream? localMediaStream;
  final html.VideoElement video = html.VideoElement();

  @override
  int get videoWidth => video.videoWidth;
  @override
  int get videoHeight => video.videoHeight;

  Future<html.MediaStream?> initMediaStream(CameraFacing cameraFacing) async {
    // Check whether the browser supports the facingMode constraint
    // (i.e. multiple cameras) and use it if so
    final Map? capabilities =
        html.window.navigator.mediaDevices?.getSupportedConstraints();
    final Map<String, dynamic> constraints;
    if (capabilities != null && capabilities['facingMode'] == true) {
      constraints = {
        'video': VideoOptions(
          facingMode:
              cameraFacing == CameraFacing.front ? 'user' : 'environment',
        )
      };
    } else {
      constraints = {'video': true};
    }
    final stream =
        await html.window.navigator.mediaDevices?.getUserMedia(constraints);
    return stream;
  }

  void prepareVideoElement(html.VideoElement videoSource);

  Future<void> attachStreamToVideo(
    html.MediaStream stream,
    html.VideoElement videoSource,
  );

  @override
  Future<void> stop() async {
    try {
      // Stop the camera stream
      localMediaStream?.getTracks().forEach((track) {
        if (track.readyState == 'live') {
          track.stop();
        }
      });
    } catch (e) {
      debugPrint('Failed to stop stream: $e');
    }
    video.srcObject = null;
    localMediaStream = null;
    videoContainer.children = [];
  }
}
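
// ---------------------------------------------------------------------------
// Minimal sketch, not part of this library: one way a concrete reader could
// satisfy the two abstract hooks of InternalStreamCreation. The `playsinline`
// attribute and the muting are illustrative choices that are commonly needed
// for autoplay on mobile browsers.
// ---------------------------------------------------------------------------
mixin ExampleStreamAttachment on InternalStreamCreation {
  @override
  void prepareVideoElement(html.VideoElement videoSource) {
    // Play inline (not fullscreen) and without audio so autoplay is permitted.
    videoSource
      ..setAttribute('playsinline', 'true')
      ..muted = true;
    videoContainer.children = [videoSource];
  }

  @override
  Future<void> attachStreamToVideo(
    html.MediaStream stream,
    html.VideoElement videoSource,
  ) async {
    videoSource.srcObject = stream;
    await videoSource.play();
    localMediaStream = stream;
  }
}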

/// Mixin for libraries that don't have built-in torch support
mixin InternalTorchDetection on InternalStreamCreation {
  @override
  Future<bool> hasTorch() async {
    try {
      final track = localMediaStream?.getVideoTracks();
      if (track != null) {
        final imageCapture = ImageCapture(track.first);
        final photoCapabilities = await promiseToFuture<PhotoCapabilities>(
          imageCapture.getPhotoCapabilities(),
        );
        return photoCapabilities.fillLightMode != null;
      }
    } catch (e) {
      // ImageCapture is not supported by some browsers:
      // https://developer.mozilla.org/en-US/docs/Web/API/ImageCapture#browser_compatibility
    }
    return false;
  }

  @override
  Future<void> toggleTorch({required bool enabled}) async {
    final hasTorch = await this.hasTorch();
    if (hasTorch) {
      final track = localMediaStream?.getVideoTracks();
      await track?.first.applyConstraints({
        'advanced': [
          {'torch': enabled}
        ]
      });
    }
  }
}

@JS('Promise')
@staticInterop
class Promise<T> {}

@JS()
@anonymous
class PhotoCapabilities {
  /// Returns an array of available fill light options. Options include auto, off, or flash.
  external List<String>? get fillLightMode;
}

@JS('ImageCapture')
@staticInterop
class ImageCapture {
  /// MediaStreamTrack
  external factory ImageCapture(dynamic track);
}

extension ImageCaptureExt on ImageCapture {
  external Promise<PhotoCapabilities> getPhotoCapabilities();
}
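
// ---------------------------------------------------------------------------
// Composition sketch (assumption, not part of this file): a concrete reader
// wrapping a JS decoding library typically extends the base class and mixes
// in both helpers, then implements the remaining members against that
// library.
// ---------------------------------------------------------------------------
abstract class ExampleJsBarcodeReader extends WebBarcodeReaderBase
    with InternalStreamCreation, InternalTorchDetection {
  ExampleJsBarcodeReader({required html.DivElement videoContainer})
      : super(videoContainer: videoContainer);

  // Still to be provided by a concrete subclass:
  //   * isStarted / start(): obtain a stream via initMediaStream() and hook it
  //     up with prepareVideoElement() and attachStreamToVideo().
  //   * detectBarcodeContinuously(): query the JS decoder every frameInterval
  //     and map its results to Barcode objects.
}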