import Combine
import AVFoundation
/// Drives an `AVCaptureSession` that scans QR codes and publishes each
/// decoded payload as `Data`.
final class CameraController: NSObject {

    /// Emits the payload of each detected QR code. Values are delivered on
    /// the main queue, so subscribers may touch UI directly.
    var dataPublisher: AnyPublisher<Data, Never> {
        dataSubject
            .receive(on: DispatchQueue.main)
            .eraseToAnyPublisher()
    }

    /// Layer rendering the live camera feed; add it to a view's layer tree.
    /// Created lazily so the session exists before the layer is built.
    lazy var previewLayer: CALayer = {
        let layer = AVCaptureVideoPreviewLayer(session: session)
        layer.videoGravity = .resizeAspectFill
        return layer
    }()

    private let session = AVCaptureSession()
    private let metadataOutput = AVCaptureMetadataOutput()
    private let dataSubject = PassthroughSubject<Data, Never>()

    override init() {
        super.init()
        setupCameraDevice()
    }

    /// Starts capture if it is not already running.
    /// NOTE(review): `startRunning()` is a blocking call; Apple recommends
    /// invoking it off the main thread — confirm call sites.
    func start() {
        guard session.isRunning == false else { return }
        session.startRunning()
    }

    /// Stops capture if it is currently running.
    func stop() {
        guard session.isRunning == true else { return }
        session.stopRunning()
    }

    /// Wires the default video device and the metadata output into the session.
    private func setupCameraDevice() {
        guard let captureDevice = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: captureDevice),
              session.canAddInput(input),
              session.canAddOutput(metadataOutput) else {
            // No camera / no permission / output rejected: leave the session
            // unconfigured instead of touching the output below.
            return
        }

        session.addInput(input)
        session.addOutput(metadataOutput)

        // Configure the output ONLY after it is attached to the session:
        // `.qr` is not in `availableMetadataObjectTypes` for a detached
        // output, and assigning it then raises NSInvalidArgumentException.
        // (The original code ran these two lines unconditionally, crashing
        // on camera-less environments such as the Simulator.)
        metadataOutput.setMetadataObjectsDelegate(self, queue: .main)
        metadataOutput.metadataObjectTypes = [.qr]
    }

    /// `AVCaptureMetadataOutputObjectsDelegate` callback (conformance is
    /// declared in an extension below the class).
    func metadataOutput(
        _ output: AVCaptureMetadataOutput,
        didOutput metadataObjects: [AVMetadataObject],
        from connection: AVCaptureConnection
    ) {
        guard let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              object.type == .qr,
              // NOTE(review): `.nonLossyASCII` mangles non-ASCII payloads;
              // `.utf8` is the conventional choice for QR strings — confirm
              // this encoding is intentional before changing it.
              let data = object.stringValue?.data(using: .nonLossyASCII) else { return }
        dataSubject.send(data)
    }
}
// MARK: - AVCaptureMetadataOutputObjectsDelegate
// Conformance only — the `metadataOutput(_:didOutput:from:)` callback is
// implemented in the class body above.
extension CameraController: AVCaptureMetadataOutputObjectsDelegate {}