diff --git a/ios/ReactNativeCameraKit/CameraManager.swift b/ios/ReactNativeCameraKit/CameraManager.swift index 1d19ccff4..f105d49b2 100644 --- a/ios/ReactNativeCameraKit/CameraManager.swift +++ b/ios/ReactNativeCameraKit/CameraManager.swift @@ -25,12 +25,11 @@ import Foundation resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { guard let cam = self.camera else { - reject("capture_error", "CKCamera capture() was called but camera view is nil", nil) + reject("capture_error", "CKCamera capture() was called but camera view is nil", nil) return } - cam.capture(options as! [String: Any], - onSuccess: { resolve($0) }, - onError: { reject("capture_error", $0, nil) }) + cam.capture(onSuccess: { resolve($0) }, + onError: { reject("capture_error", $0, nil) }) } @objc func checkDeviceCameraAuthorizationStatus(_ resolve: @escaping RCTPromiseResolveBlock, diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index fda57ef29..1400591be 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -28,6 +28,6 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func update(scannerFrameSize: CGRect?) func capturePicture(onWillCapture: @escaping () -> Void, - onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> (), - onError: @escaping (_ message: String) -> ()) + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> Void, + onError: @escaping (_ message: String) -> Void) } diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 142ee0314..92eb74def 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -71,7 +71,7 @@ class CameraView: UIView { } private func setupCamera() { - if (hasPropBeenSetup && hasPermissionBeenGranted && !hasCameraBeenSetup) { + if hasPropBeenSetup && hasPermissionBeenGranted && !hasCameraBeenSetup { hasCameraBeenSetup = true camera.setup(cameraType: cameraType, supportedBarcodeType: scanBarcode && onReadCode != nil ? 
supportedBarcodeType : []) } @@ -141,6 +141,7 @@ class CameraView: UIView { } // Called once when all props have been set, then every time one is updated + // swiftlint:disable:next cyclomatic_complexity function_body_length override func didSetProps(_ changedProps: [String]) { hasPropBeenSetup = true @@ -154,11 +155,11 @@ class CameraView: UIView { if changedProps.contains("cameraType") || changedProps.contains("torchMode") { camera.update(torchMode: torchMode) } - + if changedProps.contains("onOrientationChange") { camera.update(onOrientationChange: onOrientationChange) } - + if changedProps.contains("onZoom") { camera.update(onZoom: onZoom) } @@ -219,11 +220,11 @@ class CameraView: UIView { if changedProps.contains("zoomMode") { self.update(zoomMode: zoomMode) } - + if changedProps.contains("zoom") { camera.update(zoom: zoom?.doubleValue) } - + if changedProps.contains("maxZoom") { camera.update(maxZoom: maxZoom?.doubleValue) } @@ -231,9 +232,8 @@ class CameraView: UIView { // MARK: Public - func capture(_ options: [String: Any], - onSuccess: @escaping (_ imageObject: [String: Any]) -> (), - onError: @escaping (_ error: String) -> ()) { + func capture(onSuccess: @escaping (_ imageObject: [String: Any]) -> Void, + onError: @escaping (_ error: String) -> Void) { camera.capturePicture(onWillCapture: { [weak self] in // Flash/dim preview to indicate shutter action DispatchQueue.main.async { @@ -250,12 +250,12 @@ class CameraView: UIView { } }, onError: onError) } - + // MARK: - Private Helper private func update(zoomMode: ZoomMode) { if zoomMode == .on { - if (zoomGestureRecognizer == nil) { + if zoomGestureRecognizer == nil { let pinchGesture = UIPinchGestureRecognizer(target: self, action: #selector(handlePinchToZoomRecognizer(_:))) addGestureRecognizer(pinchGesture) zoomGestureRecognizer = pinchGesture @@ -267,13 +267,12 @@ class CameraView: UIView { } } } - + private func handleCameraPermission() { switch AVCaptureDevice.authorizationStatus(for: .video) { case .authorized: // The user has previously granted access to the camera. hasPermissionBeenGranted = true - break case .notDetermined: // The user has not yet been presented with the option to grant video access. 
AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in @@ -289,11 +288,11 @@ class CameraView: UIView { private func writeCaptured(imageData: Data, thumbnailData: Data?, - onSuccess: @escaping (_ imageObject: [String: Any]) -> (), - onError: @escaping (_ error: String) -> ()) { + onSuccess: @escaping (_ imageObject: [String: Any]) -> Void, + onError: @escaping (_ error: String) -> Void) { do { let temporaryImageFileURL = try saveToTmpFolder(imageData) - + onSuccess([ "size": imageData.count, "uri": temporaryImageFileURL.description, diff --git a/ios/ReactNativeCameraKit/FocusInterfaceView.swift b/ios/ReactNativeCameraKit/FocusInterfaceView.swift index f873e8c4a..0f9a02cc4 100644 --- a/ios/ReactNativeCameraKit/FocusInterfaceView.swift +++ b/ios/ReactNativeCameraKit/FocusInterfaceView.swift @@ -82,7 +82,7 @@ class FocusInterfaceView: UIView { func update(focusMode: FocusMode) { if focusMode == .on { - if (focusGestureRecognizer == nil) { + if focusGestureRecognizer == nil { let tapGesture = UITapGestureRecognizer(target: self, action: #selector(focusAndExposeTap(_:))) addGestureRecognizer(tapGesture) focusGestureRecognizer = tapGesture @@ -157,17 +157,17 @@ class FocusInterfaceView: UIView { UIView.animate(withDuration: 0.2, animations: { self.focusView.frame = focusViewFrame self.focusView.alpha = 1 - }) { _ in + }, completion: { _ in self.hideFocusViewTimer?.invalidate() - self.hideFocusViewTimer = Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { [weak self] _ in + self.hideFocusViewTimer = Timer.scheduledTimer(withTimeInterval: 2, repeats: false, block: { [weak self] _ in guard let self else { return } UIView.animate(withDuration: 0.2, animations: { self.focusView.alpha = 0 - }) { _ in + }, completion: { _ in self.focusView.isHidden = true - } - } - } + }) + }) + }) } } diff --git a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift index 42b4a6eea..800cf1d13 100644 --- a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift +++ b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift @@ -43,7 +43,7 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate { return } - var thumbnailData: Data? = nil + var thumbnailData: Data? 
if let previewPixelBuffer = photo.previewPixelBuffer { let ciImage = CIImage(cvPixelBuffer: previewPixelBuffer) let uiImage = UIImage(ciImage: ciImage) diff --git a/ios/ReactNativeCameraKit/RatioOverlayView.swift b/ios/ReactNativeCameraKit/RatioOverlayView.swift index f65cc7a41..a98e2cc24 100644 --- a/ios/ReactNativeCameraKit/RatioOverlayView.swift +++ b/ios/ReactNativeCameraKit/RatioOverlayView.swift @@ -88,6 +88,7 @@ class RatioOverlayView: UIView { // MARK: - Private + // swiftlint:disable:next function_body_length private func setOverlayParts() { guard let ratioData, ratioData.ratio != 0 else { isHidden = true diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 492ad956f..3e97544c1 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -3,6 +3,8 @@ // ReactNativeCameraKit // +// swiftlint:disable file_length + import AVFoundation import UIKit import CoreMotion @@ -10,6 +12,7 @@ import CoreMotion /* * Real camera implementation that uses AVFoundation */ +// swiftlint:disable:next type_body_length class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate { var previewView: UIView { cameraPreview } @@ -32,13 +35,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var resetFocus: (() -> Void)? private var focusFinished: (() -> Void)? private var onBarcodeRead: ((_ barcode: String) -> Void)? - private var scannerFrameSize: CGRect? = nil + private var scannerFrameSize: CGRect? private var onOrientationChange: RCTDirectEventBlock? private var onZoomCallback: RCTDirectEventBlock? private var lastOnZoom: Double? private var zoom: Double? private var maxZoom: Double? - + private var deviceOrientation = UIDeviceOrientation.unknown private var motionManager: CMMotionManager? @@ -52,10 +55,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega override init() { super.init() - + // In addition to using accelerometer to determine REAL orientation // we also listen to UI orientation changes (UIDevice does not report rotation if orientation lock is on, so photos aren't rotated correctly) - // When UIDevice reports rotation to the left, UI is rotated right to compensate, but that means we need to re-rotate left to make camera appear correctly (see self.uiOrientationChanged) + // When UIDevice reports rotation to the left, UI is rotated right to compensate, but that means we need to re-rotate left + // to make camera appear correctly (see self.uiOrientationChanged) UIDevice.current.beginGeneratingDeviceOrientationNotifications() NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, object: UIDevice.current, @@ -75,11 +79,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.removeObservers() } } - + motionManager?.stopAccelerometerUpdates() - + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) - + UIDevice.current.endGeneratingDeviceOrientationNotifications() } @@ -94,7 +98,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.cameraPreview.session = self.session self.cameraPreview.previewLayer.videoGravity = .resizeAspect } - + self.initializeMotionManager() // Setup the capture session. 
@@ -113,13 +117,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // We need to reapply the configuration after starting the camera self.update(torchMode: self.torchMode) } - + DispatchQueue.main.async { self.setVideoOrientationToInterfaceOrientation() } } } - + private var zoomStartedAt: Double = 1.0 func zoomPinchStart() { sessionQueue.async { @@ -130,13 +134,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func zoomPinchChange(pinchScale: CGFloat) { guard !pinchScale.isNaN else { return } - + sessionQueue.async { guard let videoDevice = self.videoDeviceInput?.device else { return } - + let desiredZoomFactor = (self.zoomStartedAt / self.defaultZoomFactor(for: videoDevice)) * pinchScale let zoomForDevice = self.getValidZoom(forDevice: videoDevice, zoom: desiredZoomFactor) - + if zoomForDevice != self.normalizedZoom(for: videoDevice) { // Only trigger zoom changes if it's an uncontrolled component (zoom isn't manually set) // otherwise it's likely to cause issues inf. loops @@ -147,25 +151,25 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } } - + func update(maxZoom: Double?) { self.maxZoom = maxZoom - + // Re-update zoom value in case the max was increased self.update(zoom: self.zoom) } - + func update(zoom: Double?) { sessionQueue.async { self.zoom = zoom guard let videoDevice = self.videoDeviceInput?.device else { return } - guard let zoom_ = zoom else { return } + guard let zoom else { return } - let zoomForDevice = self.getValidZoom(forDevice: videoDevice, zoom: zoom_) + let zoomForDevice = self.getValidZoom(forDevice: videoDevice, zoom: zoom) self.setZoomFor(videoDevice, to: zoomForDevice) } } - + /** `desiredZoom` can be nil when we want to notify what the zoom factor really is */ @@ -174,7 +178,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega let cameraZoom = normalizedZoom(for: videoDevice) let desiredOrCameraZoom = desiredZoom ?? cameraZoom guard desiredOrCameraZoom > -1.0 else { return } - + // ignore duplicate events when zooming to min/max // but always notify if a desiredZoom wasn't given, // since that means they wanted to reset setZoom(0.0) @@ -182,11 +186,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega if desiredZoom != nil && desiredOrCameraZoom == lastOnZoom { return } - + lastOnZoom = desiredOrCameraZoom self.onZoomCallback?(["zoom": desiredOrCameraZoom]) } - + func update(onZoom: RCTDirectEventBlock?) { self.onZoomCallback = onZoom } @@ -228,11 +232,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } } - + func update(onOrientationChange: RCTDirectEventBlock?) { self.onOrientationChange = onOrientationChange } - + func update(torchMode: TorchMode) { sessionQueue.async { self.torchMode = torchMode @@ -300,7 +304,8 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega the main thread and session configuration is done on the session queue. */ DispatchQueue.main.async { - let videoPreviewLayerOrientation = self.videoOrientation(from: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation + let videoPreviewLayerOrientation = + self.videoOrientation(from: self.deviceOrientation) ?? 
self.cameraPreview.previewLayer.connection?.videoOrientation self.sessionQueue.async { if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { @@ -319,7 +324,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega onWillCapture: onWillCapture, onCaptureSuccess: { uniqueID, imageData, thumbnailData in self.inProgressPhotoCaptureDelegates[uniqueID] = nil - + onSuccess(imageData, thumbnailData) }, onCaptureError: { uniqueID, errorMessage in @@ -365,10 +370,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } DispatchQueue.main.async { - let visibleRect = scannerFrameSize != nil && scannerFrameSize != .zero ? self.cameraPreview.previewLayer.metadataOutputRectConverted(fromLayerRect: scannerFrameSize!) : nil + var visibleRect: CGRect? + if let scannerFrameSize, scannerFrameSize != .zero { + visibleRect = self.cameraPreview.previewLayer.metadataOutputRectConverted(fromLayerRect: scannerFrameSize) + } self.sessionQueue.async { - if (self.metadataOutput.rectOfInterest == visibleRect) { + if self.metadataOutput.rectOfInterest == visibleRect { return } @@ -457,14 +465,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega if session.canAddInput(videoDeviceInput) { session.addInput(videoDeviceInput) - + self.videoDeviceInput = videoDeviceInput self.resetZoom(forDevice: videoDevice) } else { return .sessionConfigurationFailed } - if session.canAddOutput(photoOutput) { session.addOutput(photoOutput) @@ -494,11 +501,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private func defaultZoomFactor(for videoDevice: AVCaptureDevice) -> CGFloat { let fallback = 1.0 guard #available(iOS 13.0, *) else { return fallback } - + // Devices that have multiple physical cameras are hidden behind one virtual camera input // The zoom factor defines what physical camera it actually uses // The default lens on the native camera app is the wide angle - if var wideAngleIndex = videoDevice.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }) { + if let wideAngleIndex = videoDevice.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }) { // .virtualDeviceSwitchOverVideoZoomFactors has the .constituentDevices zoom factor which borders the NEXT device // so we grab the one PRIOR to the wide angle to get the wide angle's zoom factor guard wideAngleIndex >= 1 else { return fallback } @@ -507,7 +514,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega return fallback } - + private func setZoomFor(_ videoDevice: AVCaptureDevice, to zoom: Double) { do { try videoDevice.lockForConfiguration() @@ -523,7 +530,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega let defaultZoom = defaultZoomFactor(for: videoDevice) return videoDevice.videoZoomFactor / defaultZoom } - + private func getValidZoom(forDevice videoDevice: AVCaptureDevice, zoom: Double) -> Double { let defaultZoom = defaultZoomFactor(for: videoDevice) let minZoomFactor = videoDevice.minAvailableVideoZoomFactor / defaultZoom @@ -534,7 +541,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega let cappedZoom = max(minZoomFactor, min(zoom, maxZoomFactor)) return cappedZoom } - + private func resetZoom(forDevice videoDevice: AVCaptureDevice) { var zoomForDevice = getValidZoom(forDevice: videoDevice, zoom: 1) if let zoomPropValue =
self.zoom { @@ -594,7 +601,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega adjustingFocusObservation = videoDeviceInput?.device.observe(\.isAdjustingFocus, options: .new, - changeHandler: { [weak self] device, change in + changeHandler: { [weak self] _, change in guard let self, let isFocusing = change.newValue else { return } self.isAdjustingFocus(isFocusing: isFocusing) @@ -616,6 +623,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } private func removeObservers() { + // swiftlint:disable:next notification_center_detachment NotificationCenter.default.removeObserver(self) adjustingFocusObservation?.invalidate() diff --git a/ios/ReactNativeCameraKit/RealPreviewView.swift b/ios/ReactNativeCameraKit/RealPreviewView.swift index 56c9e96c9..543089bbe 100644 --- a/ios/ReactNativeCameraKit/RealPreviewView.swift +++ b/ios/ReactNativeCameraKit/RealPreviewView.swift @@ -14,6 +14,7 @@ class RealPreviewView: UIView { // Create an accessor for the right layer type var previewLayer: AVCaptureVideoPreviewLayer { // We can safely forcecast here, it can't change at runtime + // swiftlint:disable:next force_cast return layer as! AVCaptureVideoPreviewLayer } diff --git a/ios/ReactNativeCameraKit/ScannerFrameView.swift b/ios/ReactNativeCameraKit/ScannerFrameView.swift index 381da4f8a..161f061db 100644 --- a/ios/ReactNativeCameraKit/ScannerFrameView.swift +++ b/ios/ReactNativeCameraKit/ScannerFrameView.swift @@ -48,7 +48,6 @@ class ScannerFrameView: UIView { laserView.frame = CGRect(x: 2, y: 2, width: frame.size.width - 4, height: 2) } - UIView.animate(withDuration: 3, delay: 0, options: [.autoreverse, .repeat], animations: { self.laserView.center = CGPoint(x: self.frame.size.width / 2, y: self.frame.size.height - 3) }) diff --git a/ios/ReactNativeCameraKit/ScannerInterfaceView.swift b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift index dd6a6c2b5..bedfea1f8 100644 --- a/ios/ReactNativeCameraKit/ScannerInterfaceView.swift +++ b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift @@ -80,6 +80,10 @@ class ScannerInterfaceView: UIView { topOverlayView.frame = CGRect(x: 0, y: 0, width: frame.size.width, height: frameRect.origin.y) leftOverlayView.frame = CGRect(x: 0, y: frameRect.origin.y, width: frameOffset, height: frameHeight) rightOverlayView.frame = CGRect(x: frameRect.size.width + frameOffset, y: frameRect.origin.y, width: frameOffset, height: frameHeight) - bottomOverlayView.frame = CGRect(x: 0, y: frameRect.origin.y + frameHeight, width: frame.size.width, height: frame.size.height - frameRect.origin.y - frameHeight) + bottomOverlayView.frame = CGRect( + x: 0, + y: frameRect.origin.y + frameHeight, + width: frame.size.width, + height: frame.size.height - frameRect.origin.y - frameHeight) } } diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index be15c83b8..9eafdcc89 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -31,7 +31,7 @@ class SimulatorCamera: CameraProtocol { DispatchQueue.main.async { self.mockPreview.cameraTypeLabel.text = "Camera type: \(cameraType)" } - + // Listen to orientation changes UIDevice.current.beginGeneratingDeviceOrientationNotifications() NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, @@ -39,9 +39,8 @@ class SimulatorCamera: CameraProtocol { queue: nil, using: { [weak self] notification in self?.orientationChanged(notification: 
notification) }) - } - + private func orientationChanged(notification: Notification) { guard let device = notification.object as? UIDevice, let orientation = Orientation(from: device.orientation) else { @@ -50,7 +49,7 @@ class SimulatorCamera: CameraProtocol { self.onOrientationChange?(["orientation": orientation.rawValue]) } - + func cameraRemovedFromSuperview() { NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) @@ -59,16 +58,16 @@ class SimulatorCamera: CameraProtocol { func update(onOrientationChange: RCTDirectEventBlock?) { self.onOrientationChange = onOrientationChange } - + func update(onZoom: RCTDirectEventBlock?) { self.onZoom = onZoom } - + func setVideoDevice(zoomFactor: Double) { self.videoDeviceZoomFactor = zoomFactor self.mockPreview.zoomLabel.text = "Zoom: \(zoomFactor)" } - + private var zoomStartedAt: Double = 1.0 func zoomPinchStart() { DispatchQueue.main.async { @@ -76,10 +75,10 @@ class SimulatorCamera: CameraProtocol { self.mockPreview.zoomLabel.text = "Zoom start" } } - + func zoomPinchChange(pinchScale: CGFloat) { guard !pinchScale.isNaN else { return } - + DispatchQueue.main.async { let desiredZoomFactor = self.zoomStartedAt * pinchScale var maxZoomFactor = self.videoDeviceMaxAvailableVideoZoomFactor @@ -87,7 +86,7 @@ class SimulatorCamera: CameraProtocol { maxZoomFactor = min(maxZoom, maxZoomFactor) } let zoomForDevice = max(1.0, min(desiredZoomFactor, maxZoomFactor)) - + if zoomForDevice != self.videoDeviceZoomFactor { // Only trigger zoom changes if it's an uncontrolled component (zoom isn't manually set) // otherwise it's likely to cause issues inf. loops @@ -132,14 +131,14 @@ class SimulatorCamera: CameraProtocol { self.mockPreview.randomize() } } - + func update(maxZoom: Double?) { self.maxZoom = maxZoom } - + func update(zoom: Double?) { self.zoom = zoom - + DispatchQueue.main.async { var zoomOrDefault = zoom ?? 0 // -1 will reset to zoom default (which is not 1 on modern cameras) @@ -153,7 +152,7 @@ class SimulatorCamera: CameraProtocol { } let zoomForDevice = max(1.0, min(zoomOrDefault, maxZoomFactor)) self.setVideoDevice(zoomFactor: zoomForDevice) - + // If they wanted to reset, tell them what the default zoom turned out to be // regardless if it's controlled if self.zoom == nil || zoom == 0 { @@ -161,7 +160,6 @@ class SimulatorCamera: CameraProtocol { } } } - func isBarcodeScannerEnabled(_ isEnabled: Bool, supportedBarcodeType: [AVMetadataObject.ObjectType], @@ -169,8 +167,8 @@ class SimulatorCamera: CameraProtocol { func update(scannerFrameSize: CGRect?) {} func capturePicture(onWillCapture: @escaping () -> Void, - onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> (), - onError: @escaping (_ message: String) -> ()) { + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> Void, + onError: @escaping (_ message: String) -> Void) { onWillCapture() DispatchQueue.main.async {
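// --- Illustrative addendum (not part of the patch above) ---
// A minimal, self-contained sketch of the zoom math that RealCamera's
// defaultZoomFactor(for:) and getValidZoom(forDevice:zoom:) rely on: the zoom prop is
// expressed relative to the wide-angle lens (1.0 == default), then clamped to the
// device's capabilities and the optional maxZoom prop. The function name and the
// sample values below are assumptions made for this example, not library API.
import Foundation

func clampZoom(_ requested: Double,
               defaultFactor: Double,  // wide-angle switch-over factor, e.g. 2.0 on a virtual triple camera
               minFactor: Double,      // AVCaptureDevice.minAvailableVideoZoomFactor
               maxFactor: Double,      // AVCaptureDevice.maxAvailableVideoZoomFactor
               maxZoomProp: Double?) -> Double {
    // Normalize the device limits so 1.0 means "wide-angle default lens"
    let minZoom = minFactor / defaultFactor
    var maxZoom = maxFactor / defaultFactor
    if let maxZoomProp { maxZoom = min(maxZoomProp, maxZoom) }
    // Cap the requested zoom to the normalized range
    return max(minZoom, min(requested, maxZoom))
}

// Requesting 0.4x on a device whose ultra-wide bottoms out at 0.5x gets capped to 0.5:
print(clampZoom(0.4, defaultFactor: 2.0, minFactor: 1.0, maxFactor: 16.0, maxZoomProp: nil))  // 0.5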