From 9342a1d2d99cf180fffca13a493510b341d108bc Mon Sep 17 00:00:00 2001 From: David Bertet Date: Mon, 5 Jun 2023 00:16:16 -0700 Subject: [PATCH 01/20] Lay the foundation for a Swift migration --- ReactNativeCameraKit.podspec | 2 +- example/ios/Podfile.lock | 2 +- .../project.pbxproj | 69 +++++---- ios/ReactNativeCameraKit/CKCamera.h | 57 -------- ios/ReactNativeCameraKit/CKCamera.m | 97 ++++--------- ios/ReactNativeCameraKit/CKCamera.swift | 121 ++++++++++++++++ ios/ReactNativeCameraKit/CKCameraManager.h | 15 -- ios/ReactNativeCameraKit/CKCameraManager.m | 77 +++------- .../CKCameraManager.swift | 48 +++++++ .../CKCameraOverlayView.h | 16 --- .../CKCameraOverlayView.m | 100 ------------- ios/ReactNativeCameraKit/CKCompressedImage.h | 10 -- ios/ReactNativeCameraKit/CKCompressedImage.m | 40 ------ .../CKCompressedImage.swift | 51 +++++++ ios/ReactNativeCameraKit/CKMockPreview.h | 19 --- ios/ReactNativeCameraKit/CKMockPreview.m | 135 ------------------ ios/ReactNativeCameraKit/CKMockPreview.swift | 132 +++++++++++++++++ ios/ReactNativeCameraKit/CKOverlayObject.h | 13 -- ios/ReactNativeCameraKit/CKOverlayObject.m | 44 ------ .../CKOverlayObject.swift | 36 +++++ .../CKRatioOverlayView.swift | 121 ++++++++++++++++ ios/ReactNativeCameraKit/CKTypes+RCTConvert.m | 44 ++++++ ios/ReactNativeCameraKit/CKTypes.swift | 85 +++++++++++ .../ReactNativeCameraKit-Bridging-Header.h | 15 ++ .../ReactNativeCameraKit.h | 10 ++ 25 files changed, 755 insertions(+), 604 deletions(-) create mode 100644 ios/ReactNativeCameraKit/CKCamera.swift delete mode 100644 ios/ReactNativeCameraKit/CKCameraManager.h create mode 100644 ios/ReactNativeCameraKit/CKCameraManager.swift delete mode 100644 ios/ReactNativeCameraKit/CKCameraOverlayView.h delete mode 100644 ios/ReactNativeCameraKit/CKCameraOverlayView.m delete mode 100644 ios/ReactNativeCameraKit/CKCompressedImage.h delete mode 100644 ios/ReactNativeCameraKit/CKCompressedImage.m create mode 100644 ios/ReactNativeCameraKit/CKCompressedImage.swift delete mode 100644 ios/ReactNativeCameraKit/CKMockPreview.h delete mode 100644 ios/ReactNativeCameraKit/CKMockPreview.m create mode 100644 ios/ReactNativeCameraKit/CKMockPreview.swift delete mode 100644 ios/ReactNativeCameraKit/CKOverlayObject.h delete mode 100644 ios/ReactNativeCameraKit/CKOverlayObject.m create mode 100644 ios/ReactNativeCameraKit/CKOverlayObject.swift create mode 100644 ios/ReactNativeCameraKit/CKRatioOverlayView.swift create mode 100644 ios/ReactNativeCameraKit/CKTypes+RCTConvert.m create mode 100644 ios/ReactNativeCameraKit/CKTypes.swift create mode 100644 ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h create mode 100644 ios/ReactNativeCameraKit/ReactNativeCameraKit.h diff --git a/ReactNativeCameraKit.podspec b/ReactNativeCameraKit.podspec index 2beadd754c..f7fd54f419 100644 --- a/ReactNativeCameraKit.podspec +++ b/ReactNativeCameraKit.podspec @@ -13,7 +13,7 @@ Pod::Spec.new do |s| s.platform = :ios, "10.0" s.source = { :git => "https://github.com/teslamotors/react-native-camera-kit.git", :tag => "v#{s.version}" } - s.source_files = "ios/**/*.{h,m}" + s.source_files = "ios/**/*.{h,m,swift}" s.dependency 'React-Core' end diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index 14b83a2371..36bda45d31 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -619,7 +619,7 @@ SPEC CHECKSUMS: React-RCTVibration: 43ffd976a25f6057a7cf95ea3648ba4e00287f89 React-runtimeexecutor: 7c51ae9d4b3e9608a2366e39ccaa606aa551b9ed ReactCommon: 85c98ab0a509e70bf5ee5d9715cf68dbf495b84c - 
ReactNativeCameraKit: 27815444201554dac88ed9d582c4ec549f63206a
+  ReactNativeCameraKit: db076da0ee876061ebed480aed85af13d0b80ed5
   SocketRocket: fccef3f9c5cedea1353a9ef6ada904fde10d6608
   Yoga: 065f0b74dba4832d6e328238de46eb72c5de9556
   YogaKit: f782866e155069a2cca2517aafea43200b01fd5a
diff --git a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
index 6a28265e0a..20970b0d4b 100644
--- a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
+++ b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
@@ -9,10 +9,14 @@
 /* Begin PBXBuildFile section */
 26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 26550AF51CFC7086007FF2DF /* CKCameraManager.m */; };
 2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 2685AA231CFD89A300E4A446 /* CKCamera.m */; };
- 269292831D3B7D6000E07DDF /* CKCameraOverlayView.m in Sources */ = {isa = PBXBuildFile; fileRef = 269292821D3B7D6000E07DDF /* CKCameraOverlayView.m */; };
- 269292861D3B81C800E07DDF /* CKOverlayObject.m in Sources */ = {isa = PBXBuildFile; fileRef = 269292851D3B81C800E07DDF /* CKOverlayObject.m */; };
- A7686BFE1EC9CFEC00959216 /* CKCompressedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = A7686BFD1EC9CFEC00959216 /* CKCompressedImage.m */; };
- FC8E10CF253F8A23006D5AD0 /* CKMockPreview.m in Sources */ = {isa = PBXBuildFile; fileRef = FC8E10CE253F8A23006D5AD0 /* CKMockPreview.m */; };
+ 4620AA6A2A2BFDC400BC8929 /* CKOverlayObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA692A2BFDC400BC8929 /* CKOverlayObject.swift */; };
+ 4620AA6C2A2C03FC00BC8929 /* CKRatioOverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6B2A2C03FC00BC8929 /* CKRatioOverlayView.swift */; };
+ 4620AA6E2A2C090500BC8929 /* CKCompressedImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6D2A2C090500BC8929 /* CKCompressedImage.swift */; };
+ 4620AA702A2C4A5F00BC8929 /* CKMockPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6F2A2C4A5F00BC8929 /* CKMockPreview.swift */; };
+ 4620AA722A2C4FA500BC8929 /* CKCameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA712A2C4FA500BC8929 /* CKCameraManager.swift */; };
+ 4620AA742A2C52C300BC8929 /* CKCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA732A2C52C300BC8929 /* CKCamera.swift */; };
+ 463096882A2C757F002ABA1A /* CKTypes+RCTConvert.m in Sources */ = {isa = PBXBuildFile; fileRef = 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */; };
+ 4630968B2A2D5423002ABA1A /* CKTypes.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4630968A2A2D5423002ABA1A /* CKTypes.swift */; };
 /* End PBXBuildFile section */

 /* Begin PBXCopyFilesBuildPhase section */
@@ -29,18 +33,19 @@

 /* Begin PBXFileReference section */
 2646934E1CFB2A6B00F3A740 /* libReactNativeCameraKit.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libReactNativeCameraKit.a; sourceTree = BUILT_PRODUCTS_DIR; };
- 26550AF41CFC7086007FF2DF /* CKCameraManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCameraManager.h; sourceTree = "<group>"; };
 26550AF51CFC7086007FF2DF /* CKCameraManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCameraManager.m; sourceTree = "<group>"; };
 2685AA221CFD89A300E4A446 /* CKCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCamera.h; sourceTree = "<group>"; };
2685AA231CFD89A300E4A446 /* CKCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCamera.m; sourceTree = "<group>"; };
- 269292811D3B7D6000E07DDF /* CKCameraOverlayView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCameraOverlayView.h; sourceTree = "<group>"; };
- 269292821D3B7D6000E07DDF /* CKCameraOverlayView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCameraOverlayView.m; sourceTree = "<group>"; };
- 269292841D3B81C800E07DDF /* CKOverlayObject.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKOverlayObject.h; sourceTree = "<group>"; };
- 269292851D3B81C800E07DDF /* CKOverlayObject.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKOverlayObject.m; sourceTree = "<group>"; };
- A7686BFC1EC9CFEC00959216 /* CKCompressedImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCompressedImage.h; sourceTree = "<group>"; };
- A7686BFD1EC9CFEC00959216 /* CKCompressedImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCompressedImage.m; sourceTree = "<group>"; };
- FC8E10CD253F8A23006D5AD0 /* CKMockPreview.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CKMockPreview.h; sourceTree = "<group>"; };
- FC8E10CE253F8A23006D5AD0 /* CKMockPreview.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CKMockPreview.m; sourceTree = "<group>"; };
+ 4620AA682A2BFDBC00BC8929 /* ReactNativeCameraKit-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "ReactNativeCameraKit-Bridging-Header.h"; sourceTree = "<group>"; };
+ 4620AA692A2BFDC400BC8929 /* CKOverlayObject.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CKOverlayObject.swift; sourceTree = "<group>"; };
+ 4620AA6B2A2C03FC00BC8929 /* CKRatioOverlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKRatioOverlayView.swift; sourceTree = "<group>"; };
+ 4620AA6D2A2C090500BC8929 /* CKCompressedImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKCompressedImage.swift; sourceTree = "<group>"; };
+ 4620AA6F2A2C4A5F00BC8929 /* CKMockPreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKMockPreview.swift; sourceTree = "<group>"; };
+ 4620AA712A2C4FA500BC8929 /* CKCameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKCameraManager.swift; sourceTree = "<group>"; };
+ 4620AA732A2C52C300BC8929 /* CKCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKCamera.swift; sourceTree = "<group>"; };
+ 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "CKTypes+RCTConvert.m"; sourceTree = "<group>"; };
+ 463096892A2C7D89002ABA1A /* ReactNativeCameraKit.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ReactNativeCameraKit.h; sourceTree = "<group>"; };
+ 4630968A2A2D5423002ABA1A /* CKTypes.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKTypes.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */

 /* Begin PBXFrameworksBuildPhase section */
@@ -73,18 +78,19 @@
 264693501CFB2A6B00F3A740 /* ReactNativeCameraKit */ = {
 isa = PBXGroup;
 children = (
- A7686BFC1EC9CFEC00959216 /* CKCompressedImage.h */,
- 
A7686BFD1EC9CFEC00959216 /* CKCompressedImage.m */,
- 26550AF41CFC7086007FF2DF /* CKCameraManager.h */,
 26550AF51CFC7086007FF2DF /* CKCameraManager.m */,
+ 4620AA712A2C4FA500BC8929 /* CKCameraManager.swift */,
 2685AA221CFD89A300E4A446 /* CKCamera.h */,
 2685AA231CFD89A300E4A446 /* CKCamera.m */,
- 269292811D3B7D6000E07DDF /* CKCameraOverlayView.h */,
- 269292821D3B7D6000E07DDF /* CKCameraOverlayView.m */,
- 269292841D3B81C800E07DDF /* CKOverlayObject.h */,
- 269292851D3B81C800E07DDF /* CKOverlayObject.m */,
- FC8E10CD253F8A23006D5AD0 /* CKMockPreview.h */,
- FC8E10CE253F8A23006D5AD0 /* CKMockPreview.m */,
+ 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */,
+ 4630968A2A2D5423002ABA1A /* CKTypes.swift */,
+ 4620AA732A2C52C300BC8929 /* CKCamera.swift */,
+ 4620AA6D2A2C090500BC8929 /* CKCompressedImage.swift */,
+ 4620AA6B2A2C03FC00BC8929 /* CKRatioOverlayView.swift */,
+ 4620AA692A2BFDC400BC8929 /* CKOverlayObject.swift */,
+ 4620AA6F2A2C4A5F00BC8929 /* CKMockPreview.swift */,
+ 4620AA682A2BFDBC00BC8929 /* ReactNativeCameraKit-Bridging-Header.h */,
+ 463096892A2C7D89002ABA1A /* ReactNativeCameraKit.h */,
 );
 path = ReactNativeCameraKit;
 sourceTree = "<group>";
@@ -120,7 +126,7 @@
 TargetAttributes = {
 2646934D1CFB2A6B00F3A740 = {
 CreatedOnToolsVersion = 7.3;
- LastSwiftMigration = 1200;
+ LastSwiftMigration = 1430;
 };
 };
 };
@@ -147,12 +153,16 @@
 isa = PBXSourcesBuildPhase;
 buildActionMask = 2147483647;
 files = (
+ 463096882A2C757F002ABA1A /* CKTypes+RCTConvert.m in Sources */,
+ 4620AA742A2C52C300BC8929 /* CKCamera.swift in Sources */,
+ 4620AA702A2C4A5F00BC8929 /* CKMockPreview.swift in Sources */,
+ 4630968B2A2D5423002ABA1A /* CKTypes.swift in Sources */,
+ 4620AA6C2A2C03FC00BC8929 /* CKRatioOverlayView.swift in Sources */,
 26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */,
- A7686BFE1EC9CFEC00959216 /* CKCompressedImage.m in Sources */,
- 269292861D3B81C800E07DDF /* CKOverlayObject.m in Sources */,
+ 4620AA722A2C4FA500BC8929 /* CKCameraManager.swift in Sources */,
+ 4620AA6E2A2C090500BC8929 /* CKCompressedImage.swift in Sources */,
+ 4620AA6A2A2BFDC400BC8929 /* CKOverlayObject.swift in Sources */,
 2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */,
- 269292831D3B7D6000E07DDF /* CKCameraOverlayView.m in Sources */,
- FC8E10CF253F8A23006D5AD0 /* CKMockPreview.m in Sources */,
 );
 runOnlyForDeploymentPostprocessing = 0;
 };
@@ -251,6 +261,9 @@
 OTHER_LDFLAGS = "-ObjC";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SKIP_INSTALL = YES;
+ SWIFT_OBJC_BRIDGING_HEADER = "ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h";
+ SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+ SWIFT_VERSION = 5.0;
 };
 name = Debug;
 };
@@ -264,6 +277,8 @@
 OTHER_LDFLAGS = "-ObjC";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SKIP_INSTALL = YES;
+ SWIFT_OBJC_BRIDGING_HEADER = "ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h";
+ SWIFT_VERSION = 5.0;
 };
 name = Release;
 };
diff --git a/ios/ReactNativeCameraKit/CKCamera.h b/ios/ReactNativeCameraKit/CKCamera.h
index cafa024fa0..d5f200901e 100644
--- a/ios/ReactNativeCameraKit/CKCamera.h
+++ b/ios/ReactNativeCameraKit/CKCamera.h
@@ -11,63 +11,6 @@
 typedef void (^CaptureBlock)(NSDictionary *imageObject);
 typedef void (^CallbackBlock)(BOOL success);

-typedef NS_ENUM(NSInteger, CKCameraType) {
-    CKCameraTypeBack,
-    CKCameraTypeFront,
-};
-
-@interface RCTConvert(CKCameraType)
-
-+ (CKCameraType)CKCameraType:(id)json;
-
-@end
-
-typedef NS_ENUM(NSInteger, CKCameraFlashMode) {
-    CKCameraFlashModeAuto,
-    CKCameraFlashModeOn,
-    CKCameraFlashModeOff
-};
-
-@interface RCTConvert(CKCameraFlashMode)
-
-
+ (CKCameraFlashMode)CKCameraFlashMode:(id)json;
-
-@end
-
-typedef NS_ENUM(NSInteger, CKCameraTorchMode) {
-    CKCameraTorchModeOn,
-    CKCameraTorchModeOff
-};
-
-@interface RCTConvert(CKCameraTorchMode)
-
-+ (CKCameraTorchMode)CKCameraTorchMode:(id)json;
-
-@end
-
-typedef NS_ENUM(NSInteger, CKCameraFocusMode) {
-    CKCameraFocusModeOn,
-    CKCameraFocusModeOff,
-};
-
-@interface RCTConvert(CKCameraFocusMode)
-
-+ (CKCameraFocusMode)CKCameraFocusMode:(id)json;
-
-@end
-
-typedef NS_ENUM(NSInteger, CKCameraZoomMode) {
-    CKCameraZoomModeOn,
-    CKCameraZoomModeOff,
-};
-
-@interface RCTConvert(CKCameraZoomMode)
-
-+ (CKCameraZoomMode)CKCameraZoomMode:(id)json;
-
-@end
-
-
 @interface CKCamera : UIView

 @property (nonatomic, readonly) AVCaptureDeviceInput *videoDeviceInput;
diff --git a/ios/ReactNativeCameraKit/CKCamera.m b/ios/ReactNativeCameraKit/CKCamera.m
index 2a2f15ada8..cc96cd3c02 100644
--- a/ios/ReactNativeCameraKit/CKCamera.m
+++ b/ios/ReactNativeCameraKit/CKCamera.m
@@ -4,14 +4,15 @@
 #if __has_include(<React/UIView+React.h>)
 #import <React/UIView+React.h>
 #import <React/RCTConvert.h>
+#import <React/RCTViewManager.h>
 #else
 #import "UIView+React.h"
 #import "RCTConvert.h"
+#import "RCTViewManager.h"
 #endif

 #import "CKCamera.h"
-#import "CKCameraOverlayView.h"
-#import "CKMockPreview.h"
+#import "ReactNativeCameraKit-Swift.h"

 AVCaptureVideoOrientation AVCaptureVideoOrientationFromInterfaceOrientation(UIInterfaceOrientation orientation){
     if (orientation == UIInterfaceOrientationPortrait) {
@@ -36,49 +37,6 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
     CKSetupResultSessionConfigurationFailed
 };

-@implementation RCTConvert(CKCameraType)
-
-RCT_ENUM_CONVERTER(CKCameraType, (@{
-    @"back": @(AVCaptureDevicePositionBack),
-    @"front": @(AVCaptureDevicePositionFront),
-    }), AVCaptureDevicePositionBack, integerValue)
-@end
-
-@implementation RCTConvert(CKCameraTorchMode)
-
-RCT_ENUM_CONVERTER(CKCameraTorchMode, (@{
-    @"on": @(AVCaptureTorchModeOn),
-    @"off": @(AVCaptureTorchModeOff)
-    }), AVCaptureTorchModeAuto, integerValue)
-@end
-
-@implementation RCTConvert(CKCameraFlashMode)
-
-RCT_ENUM_CONVERTER(CKCameraFlashMode, (@{
-    @"auto": @(AVCaptureFlashModeAuto),
-    @"on": @(AVCaptureFlashModeOn),
-    @"off": @(AVCaptureFlashModeOff)
-    }), AVCaptureFlashModeAuto, integerValue)
-
-@end
-
-@implementation RCTConvert(CKCameraFocusMode)
-
-RCT_ENUM_CONVERTER(CKCameraFocusMode, (@{
-    @"on": @(CKCameraFocusModeOn),
-    @"off": @(CKCameraFocusModeOff)
-    }), CKCameraFocusModeOn, integerValue)
-
-@end
-
-@implementation RCTConvert(CKCameraZoomMode)
-
-RCT_ENUM_CONVERTER(CKCameraZoomMode, (@{
-    @"on": @(CKCameraZoomModeOn),
-    @"off": @(CKCameraZoomModeOff)
-    }), CKCameraZoomModeOn, integerValue)
-
-@end

 @interface CKCamera () <AVCaptureMetadataOutputObjectsDelegate>

 @property (nonatomic, strong) CKMockPreview *mockPreview;
 @property (nonatomic, strong) UIView *focusView;
 @property (nonatomic, strong) NSTimer *focusViewTimer;
-@property (nonatomic, strong) CKCameraOverlayView *cameraOverlayView;
+@property (nonatomic, strong) CKRatioOverlayView *ratioOverlayView;
 @property (nonatomic, strong) NSTimer *focusResetTimer;
 @property (nonatomic) BOOL startFocusResetTimerAfterFocusing;
@@ -121,9 +79,9 @@ @property (nonatomic) UIView * dataReadingFrame;

 // camera options
-@property (nonatomic) AVCaptureDevicePosition cameraType;
-@property (nonatomic) AVCaptureFlashMode flashMode;
-@property (nonatomic) AVCaptureTorchMode torchMode;
+@property (nonatomic) CKCameraType cameraType;
+@property (nonatomic) CKCameraFlashMode flashMode;
+@property (nonatomic) CKCameraTorchMode torchMode;
 @property 
(nonatomic) CKCameraFocusMode focusMode; @property (nonatomic) CKCameraZoomMode zoomMode; @property (nonatomic, strong) NSString* ratioOverlay; @@ -209,7 +167,7 @@ - (instancetype)initWithFrame:(CGRect)frame { self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; [self setupCaptureSession]; #endif - + UIView *focusView = [[UIView alloc] initWithFrame:CGRectZero]; focusView.backgroundColor = [UIColor clearColor]; focusView.layer.borderColor = [UIColor yellowColor].CGColor; @@ -223,7 +181,7 @@ - (instancetype)initWithFrame:(CGRect)frame { self.zoomMode = CKCameraZoomModeOn; self.flashMode = CKCameraFlashModeAuto; self.focusMode = CKCameraFocusModeOn; - + self.frameColor = [UIColor whiteColor]; self.laserColor = [UIColor redColor]; self.frameOffset = 30; @@ -233,26 +191,27 @@ - (instancetype)initWithFrame:(CGRect)frame { return self; } -- (void)setCameraType:(AVCaptureDevicePosition)cameraType { +- (void)setCameraType:(CKCameraType)cameraType { if (cameraType != _cameraType) { _cameraType = cameraType; - [self changeCamera:cameraType]; + [self changeCamera:[EnumHelper cameraTypeToAVPosition: cameraType]]; } } -- (void)setFlashMode:(AVCaptureFlashMode)flashMode { +- (void)setFlashMode:(CKCameraFlashMode)flashMode { if (flashMode != _flashMode) { _flashMode = flashMode; - [CKCamera setFlashMode:flashMode forDevice:self.videoDeviceInput.device]; + [CKCamera setFlashMode:[EnumHelper flashModeToAVFlashMode: flashMode] forDevice:self.videoDeviceInput.device]; } } --(void)setTorchMode:(AVCaptureTorchMode)torchMode { +-(void)setTorchMode:(CKCameraTorchMode)torchMode { _torchMode = torchMode; - if (self.videoDeviceInput && [self.videoDeviceInput.device isTorchModeSupported:torchMode] && self.videoDeviceInput.device.hasTorch) { + AVCaptureTorchMode avTorchMode = [EnumHelper torchModeToAVTorchMode: torchMode]; + if (self.videoDeviceInput && [self.videoDeviceInput.device isTorchModeSupported:avTorchMode] && self.videoDeviceInput.device.hasTorch) { NSError* err = nil; if ( [self.videoDeviceInput.device lockForConfiguration:&err] ) { - [self.videoDeviceInput.device setTorchMode:torchMode]; + [self.videoDeviceInput.device setTorchMode:avTorchMode]; [self.videoDeviceInput.device unlockForConfiguration]; } } @@ -330,7 +289,7 @@ - (void) orientationChanged:(NSNotification *)notification // LANDSCAPE_LEFT: 1, // ⬅️ // PORTRAIT_UPSIDE_DOWN: 2, // ⬇️ // LANDSCAPE_RIGHT: 3, // ➡️ - + UIDevice * device = notification.object; UIDeviceOrientation orientation = device.orientation; if (orientation == UIDeviceOrientationPortrait) { @@ -366,7 +325,7 @@ - (void) setupCaptureSession { if ( [self.session canAddInput:videoDeviceInput] ) { [self.session addInput:videoDeviceInput]; self.videoDeviceInput = videoDeviceInput; - [CKCamera setFlashMode:self.flashMode forDevice:self.videoDeviceInput.device]; + [CKCamera setFlashMode:[EnumHelper flashModeToAVFlashMode: self.flashMode] forDevice:self.videoDeviceInput.device]; } else { self.setupResult = CKSetupResultSessionConfigurationFailed; @@ -450,7 +409,7 @@ -(void)handleCameraPermission { -(void)reactSetFrame:(CGRect)frame { [super reactSetFrame:frame]; - + self.previewLayer.frame = self.bounds; #if TARGET_IPHONE_SIMULATOR @@ -509,14 +468,14 @@ -(void)reactSetFrame:(CGRect)frame { -(void)setRatioOverlay:(NSString *)ratioOverlay { _ratioOverlay = ratioOverlay; - [self.cameraOverlayView setRatio:self.ratioOverlay]; + [self.ratioOverlayView setRatio:self.ratioOverlay]; } -(void)setOverlayRatioView { if (self.ratioOverlay) { - [self.cameraOverlayView 
removeFromSuperview]; - self.cameraOverlayView = [[CKCameraOverlayView alloc] initWithFrame:self.bounds ratioString:self.ratioOverlay overlayColor:self.ratioOverlayColor]; - [self addSubview:self.cameraOverlayView]; + [self.ratioOverlayView removeFromSuperview]; + self.ratioOverlayView = [[CKRatioOverlayView alloc] initWithFrame:self.bounds ratioString:self.ratioOverlay overlayColor:self.ratioOverlayColor]; + [self addSubview:self.ratioOverlayView]; } } @@ -544,12 +503,12 @@ + (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPositio - (void)snapStillImage:(NSDictionary*)options success:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError { - + #if TARGET_IPHONE_SIMULATOR [self capturePreviewLayer:options success:onSuccess onError:onError]; return; #endif - + dispatch_async( self.sessionQueue, ^{ AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo]; @@ -584,7 +543,7 @@ - (void)snapStillImage:(NSDictionary*)options success:(CaptureBlock)onSuccess on // The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously. NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; - + [self writeCapturedImageData:imageData onSuccess:onSuccess onError:onError]; [self resetFocus]; }]; @@ -649,7 +608,7 @@ - (void)changeCamera:(AVCaptureDevicePosition)preferredPosition if ( [self.session canAddInput:videoDeviceInput] ) { [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice]; - [CKCamera setFlashMode:self.flashMode forDevice:videoDevice]; + [CKCamera setFlashMode:[EnumHelper flashModeToAVFlashMode: self.flashMode] forDevice:videoDevice]; [self.session addInput:videoDeviceInput]; self.videoDeviceInput = videoDeviceInput; diff --git a/ios/ReactNativeCameraKit/CKCamera.swift b/ios/ReactNativeCameraKit/CKCamera.swift new file mode 100644 index 0000000000..baa6064a40 --- /dev/null +++ b/ios/ReactNativeCameraKit/CKCamera.swift @@ -0,0 +1,121 @@ +// +// CKCamera.swift +// ReactNativeCameraKit +// + +import AVFoundation +import UIKit + +class CKCamera1: UIView, AVCaptureMetadataOutputObjectsDelegate { + let label = UILabel() + + private var cameraType: CameraType? + private var flashMode: FlashMode? + private var torchMode: TorchMode? + private var focusMode: FocusMode? + private var zoomMode: ZoomMode? + private var ratioOverlay: String? + private var ratioOverlayColor: UIColor? + + // Barcode + private var onReadCode: RCTDirectEventBlock? + private var showFrame: Bool? + private var laserColor: UIColor? + private var frameColor: UIColor? + + private var onOrientationChange: RCTDirectEventBlock? + + private var resetFocusTimeout: Int? + private var resetFocusWhenMotionDetected: Bool? 
+
+    override init(frame: CGRect) {
+        super.init(frame: frame)
+
+        label.text = "Hello world"
+        label.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
+        addSubview(label)
+
+        backgroundColor = .red
+    }
+
+    @available(*, unavailable)
+    required init?(coder aDecoder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+
+    func snapStillImage(_ options: [String: Any], success: (_ imageObject: [String: Any]) -> (), onError:(_ error: String) -> ()) {
+        success(["uri":"SUCCESS!", "name":"OHOH!"])
+    }
+
+    // MARK: AVCaptureMetadataOutputObjectsDelegate
+
+    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
+
+    }
+
+    // MARK: Objective C setter
+
+    @objc func setCameraType(_ cameraType: CameraType) {
+        self.cameraType = cameraType
+    }
+
+    @objc func setFlashMode(_ flashMode: FlashMode) {
+        onReadCode?(["codeStringValue":"SUCCESS!", "name":"OHOH!"])
+
+        self.flashMode = flashMode
+    }
+
+    @objc func setTorchMode(_ torchMode: TorchMode) {
+        if torchMode == .off {
+            backgroundColor = .red
+        } else {
+            backgroundColor = .green
+        }
+
+        self.torchMode = torchMode
+    }
+
+    @objc func setFocusMode(_ focusMode: FocusMode) {
+        self.focusMode = focusMode
+    }
+
+    @objc func setZoomMode(_ zoomMode: ZoomMode) {
+        self.zoomMode = zoomMode
+    }
+
+    @objc func setRatioOverlayColor(_ ratioOverlayColor: UIColor) {
+        self.ratioOverlayColor = ratioOverlayColor
+    }
+
+    @objc func setRatioOverlay(_ ratioOverlay: String) {
+        self.ratioOverlay = ratioOverlay
+    }
+
+    @objc func setOnReadCode(_ onReadCode: @escaping RCTDirectEventBlock) {
+        self.onReadCode = onReadCode
+    }
+
+    @objc func setShowFrame(_ showFrame: Bool) {
+        self.showFrame = showFrame
+    }
+
+    @objc func setLaserColor(_ laserColor: UIColor) {
+        self.laserColor = laserColor
+    }
+
+    @objc func setFrameColor(_ frameColor: UIColor) {
+        self.frameColor = frameColor
+    }
+
+    @objc func setOnOrientationChange(_ onOrientationChange: @escaping RCTDirectEventBlock) {
+        self.onOrientationChange = onOrientationChange
+    }
+
+    @objc func setResetFocusTimeout(_ resetFocusTimeout: Int) {
+        self.resetFocusTimeout = resetFocusTimeout
+    }
+
+    @objc func setResetFocusWhenMotionDetected(_ resetFocusWhenMotionDetected: Bool) {
+        self.resetFocusWhenMotionDetected = resetFocusWhenMotionDetected
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CKCameraManager.h b/ios/ReactNativeCameraKit/CKCameraManager.h
deleted file mode 100644
index 80362aa8c3..0000000000
--- a/ios/ReactNativeCameraKit/CKCameraManager.h
+++ /dev/null
@@ -1,15 +0,0 @@
-@import AVFoundation;
-
-#if __has_include(<React/RCTViewManager.h>)
-#import <React/RCTViewManager.h>
-#import <React/RCTConvert.h>
-#else
-#import "RCTViewManager.h"
-#import "RCTConvert.h"
-#endif
-
-
-
-@interface CKCameraManager : RCTViewManager
-
-@end
diff --git a/ios/ReactNativeCameraKit/CKCameraManager.m b/ios/ReactNativeCameraKit/CKCameraManager.m
index 885f93e8bc..9bc98a392f 100644
--- a/ios/ReactNativeCameraKit/CKCameraManager.m
+++ b/ios/ReactNativeCameraKit/CKCameraManager.m
@@ -1,21 +1,19 @@
-#import "CKCameraManager.h"
-#import "CKCamera.h"
+//
+// CKCameraManager.m
+// ReactNativeCameraKit
+//
+
+@import AVFoundation;

-@interface CKCameraManager ()
+#if __has_include(<React/RCTViewManager.h>)
+#import <React/RCTViewManager.h>
+#import <React/RCTConvert.h>
+#else
+#import "RCTViewManager.h"
+#import "RCTConvert.h"
+#endif

-@property (nonatomic, strong) CKCamera *camera;
-
-@end
-
-@implementation CKCameraManager
-
-RCT_EXPORT_MODULE()
-
-- (UIView *)view {
-    self.camera = [CKCamera new];
-    return self.camera;
-}
+@interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager)
RCT_EXPORT_VIEW_PROPERTY(cameraType, CKCameraType) RCT_EXPORT_VIEW_PROPERTY(flashMode, CKCameraFlashMode) @@ -33,51 +31,16 @@ - (UIView *)view { RCT_EXPORT_VIEW_PROPERTY(resetFocusTimeout, NSInteger) RCT_EXPORT_VIEW_PROPERTY(resetFocusWhenMotionDetected, BOOL) -RCT_EXPORT_METHOD(capture:(NSDictionary*)options +RCT_EXTERN_METHOD(capture:(NSDictionary*)options resolve:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - - [self.camera snapStillImage:options success:^(NSDictionary *imageObject) { - resolve(imageObject); - } onError:^(NSString* error) { - reject(@"capture_error", error, nil); - }]; -} - -RCT_EXPORT_METHOD(setTorchMode:(NSString*)mode) { - AVCaptureTorchMode torchMode; - if([mode isEqualToString:@"on"]) { - torchMode = AVCaptureTorchModeOn; - } else { - torchMode = AVCaptureTorchModeOff; - } - - [self.camera setTorchMode:torchMode ]; -} - -RCT_EXPORT_METHOD(checkDeviceCameraAuthorizationStatus:(RCTPromiseResolveBlock)resolve - reject:(__unused RCTPromiseRejectBlock)reject) { - + reject:(RCTPromiseRejectBlock)reject) - AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; - if(authStatus == AVAuthorizationStatusAuthorized) { - resolve(@YES); - } else if(authStatus == AVAuthorizationStatusNotDetermined) { - resolve(@(-1)); - } else { - resolve(@NO); - } -} +RCT_EXTERN_METHOD(setTorchMode:(NSString*)mode) -RCT_EXPORT_METHOD(requestDeviceCameraAuthorization:(RCTPromiseResolveBlock)resolve - reject:(__unused RCTPromiseRejectBlock)reject) { - __block NSString *mediaType = AVMediaTypeVideo; +RCT_EXTERN_METHOD(checkDeviceCameraAuthorizationStatus:(RCTPromiseResolveBlock)resolve + reject:(__unused RCTPromiseRejectBlock)reject) - [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) { - if (resolve) { - resolve(@(granted)); - } - }]; -} +RCT_EXTERN_METHOD(requestDeviceCameraAuthorization:(RCTPromiseResolveBlock)resolve + reject:(__unused RCTPromiseRejectBlock)reject) @end diff --git a/ios/ReactNativeCameraKit/CKCameraManager.swift b/ios/ReactNativeCameraKit/CKCameraManager.swift new file mode 100644 index 0000000000..914d124d96 --- /dev/null +++ b/ios/ReactNativeCameraKit/CKCameraManager.swift @@ -0,0 +1,48 @@ +// +// CKCameraManager.swift +// ReactNativeCameraKit +// + +import AVFoundation +import Foundation + +@objc(CKCameraManager) public class CKCameraManager: RCTViewManager { + var camera: CKCamera! + + override public static func requiresMainQueueSetup() -> Bool { + return true + } + + override public func view() -> UIView! { + camera = CKCamera() + + return camera + } + + @objc func capture(_ options: NSDictionary, + resolve: @escaping RCTPromiseResolveBlock, + reject: @escaping RCTPromiseRejectBlock) { + camera.snapStillImage(options as! 
[String: Any],
+                              success: { resolve($0) },
+                              onError: { reject("capture_error", $0, nil) })
+    }
+
+    @objc func setTorchMode(_ modeString: String) {
+        let mode = TorchMode(from: modeString)
+        camera.setTorchMode(mode.avTorchMode)
+    }
+
+    @objc func checkDeviceCameraAuthorizationStatus(_ resolve: @escaping RCTPromiseResolveBlock,
+                                                    reject: @escaping RCTPromiseRejectBlock) {
+        switch AVCaptureDevice.authorizationStatus(for: .video) {
+        case .authorized: resolve(true)
+        case .notDetermined: resolve(-1)
+        default: resolve(false)
+        }
+    }
+
+    @objc func requestDeviceCameraAuthorization(_ resolve: @escaping RCTPromiseResolveBlock,
+                                                reject: @escaping RCTPromiseRejectBlock) {
+        AVCaptureDevice.requestAccess(for: .video, completionHandler: { resolve($0) })
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CKCameraOverlayView.h b/ios/ReactNativeCameraKit/CKCameraOverlayView.h
deleted file mode 100644
index 83a4e579bb..0000000000
--- a/ios/ReactNativeCameraKit/CKCameraOverlayView.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import <UIKit/UIKit.h>
-#import "CKOverlayObject.h"
-
-@interface CKCameraOverlayView : UIView
-
-
-@property (nonatomic, strong, readonly) UIView *centerView;
-@property (nonatomic, strong, readonly) CKOverlayObject *overlayObject;
-
-
-
--(instancetype)initWithFrame:(CGRect)frame ratioString:(NSString*)ratioString overlayColor:(UIColor*)overlayColor;
-
--(void)setRatio:(NSString*)ratio;
-
-@end
diff --git a/ios/ReactNativeCameraKit/CKCameraOverlayView.m b/ios/ReactNativeCameraKit/CKCameraOverlayView.m
deleted file mode 100644
index 2bf596836d..0000000000
--- a/ios/ReactNativeCameraKit/CKCameraOverlayView.m
+++ /dev/null
@@ -1,100 +0,0 @@
-#import "CKCameraOverlayView.h"
-
-
-@interface CKCameraOverlayView ()
-
-@property (nonatomic, strong, readwrite) CKOverlayObject *overlayObject;
-@property (nonatomic, strong) UIView *topView;
-@property (nonatomic, strong, readwrite) UIView *centerView;
-@property (nonatomic, strong) UIView *bottomView;
-
-
-@end
-
-@implementation CKCameraOverlayView
-
-
-
--(instancetype)initWithFrame:(CGRect)frame ratioString:(NSString*)ratioString overlayColor:(UIColor*)overlayColor {
-
-    self = [super initWithFrame:frame];
-
-    if (self) {
-
-        self.overlayObject = [[CKOverlayObject alloc] initWithString:ratioString];
-        self.topView = [[UIView alloc] initWithFrame:CGRectZero];
-        self.centerView = [[UIView alloc] initWithFrame:CGRectZero];
-        self.bottomView = [[UIView alloc] initWithFrame:CGRectZero];
-
-        overlayColor = overlayColor ? 
overlayColor : [UIColor colorWithRed:0 green:0 blue:0 alpha:0.3];
-
-        self.topView.backgroundColor = overlayColor;
-        self.bottomView.backgroundColor = overlayColor;
-
-        [self addSubview:self.topView];
-        [self addSubview:self.centerView];
-        [self addSubview:self.bottomView];
-
-        [self setOverlayParts];
-    }
-
-    return self;
-}
-
-
--(void)setOverlayParts {
-
-    if (self.overlayObject.ratio == 0) return;
-
-    CGSize centerSize = CGSizeZero;
-    CGSize sideSize = CGSizeZero;
-
-    if (self.overlayObject.width < self.overlayObject.height) {
-
-        centerSize.width = self.frame.size.width;
-        centerSize.height = self.frame.size.height * self.overlayObject.ratio;
-
-        sideSize.width = centerSize.width;
-        sideSize.height = (self.frame.size.height - centerSize.height)/2.0;
-
-        self.topView.frame = CGRectMake(0, 0, sideSize.width, sideSize.height);
-        self.centerView.frame = CGRectMake(0, self.topView.frame.size.height + self.topView.frame.origin.y, centerSize.width, centerSize.height);
-        self.bottomView.frame = CGRectMake(0, self.centerView.frame.size.height + self.centerView.frame.origin.y, sideSize.width, sideSize.height);
-    }
-    else if (self.overlayObject.width > self.overlayObject.height){
-        centerSize.width = self.frame.size.width / self.overlayObject.ratio;
-        centerSize.height = self.frame.size.height;
-
-        sideSize.width = (self.frame.size.width - centerSize.width)/2.0;
-        sideSize.height = centerSize.height;
-
-        self.topView.frame = CGRectMake(0, 0, sideSize.width, sideSize.height);
-        self.centerView.frame = CGRectMake(self.topView.frame.size.width + self.topView.frame.origin.x, 0, centerSize.width, centerSize.height);
-        self.bottomView.frame = CGRectMake(self.centerView.frame.size.width + self.centerView.frame.origin.x, 0, sideSize.width, sideSize.height);
-    }
-    else { // ratio is 1:1
-        centerSize.width = self.frame.size.width;
-        centerSize.height = self.frame.size.width;
-
-        sideSize.width = centerSize.width;
-        sideSize.height = (self.frame.size.height - centerSize.height)/2.0;
-
-        self.topView.frame = CGRectMake(0, 0, sideSize.width, sideSize.height);
-        self.centerView.frame = CGRectMake(0, self.topView.frame.size.height + self.topView.frame.origin.y, centerSize.width, centerSize.height);
-        self.bottomView.frame = CGRectMake(0, self.centerView.frame.size.height + self.centerView.frame.origin.y, sideSize.width, sideSize.height);
-    }
-}
-
-
--(void)setRatio:(NSString*)ratio {
-    self.overlayObject = [[CKOverlayObject alloc] initWithString:ratio];
-
-//    self.alpha =0;
-    [UIView animateWithDuration:0.2 animations:^{
-        [self setOverlayParts];
-    } completion:nil];
-
-}
-
-
-@end
diff --git a/ios/ReactNativeCameraKit/CKCompressedImage.h b/ios/ReactNativeCameraKit/CKCompressedImage.h
deleted file mode 100644
index 311fd453bd..0000000000
--- a/ios/ReactNativeCameraKit/CKCompressedImage.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import <UIKit/UIKit.h>
-
-@interface CKCompressedImage : NSObject
-
-- (instancetype)initWithImage:(UIImage *)image imageQuality:(NSString*)imageQuality;
-
-@property (nonatomic, readonly) UIImage *image;
-@property (nonatomic, readonly) NSData *data;
-
-@end
diff --git a/ios/ReactNativeCameraKit/CKCompressedImage.m b/ios/ReactNativeCameraKit/CKCompressedImage.m
deleted file mode 100644
index a453160887..0000000000
--- a/ios/ReactNativeCameraKit/CKCompressedImage.m
+++ /dev/null
@@ -1,40 +0,0 @@
-#import "CKCompressedImage.h"
-
-@implementation CKCompressedImage
-
-- (instancetype)initWithImage:(UIImage *)image imageQuality:(NSString*)imageQuality
-{
-    if(self = [super init]) {
-        CGFloat max = 1200.0f;
-        if 
([imageQuality isEqualToString:@"high"]) {
-            max = 1200.0f;
-        }
-        else if ([imageQuality isEqualToString:@"medium"]) {
-            max = 800.0f;
-        }
-        else {
-            _image = image;
-            _data = UIImageJPEGRepresentation(image, 1.0f);
-        }
-        float actualHeight = image.size.height;
-        float actualWidth = image.size.width;
-
-        float imgRatio = actualWidth/actualHeight;
-
-        float newHeight = (actualHeight > actualWidth) ? max : max/imgRatio;
-        float newWidth = (actualHeight > actualWidth) ? max*imgRatio : max;
-
-
-        CGRect rect = CGRectMake(0.0, 0.0, newWidth, newHeight);
-        UIGraphicsBeginImageContext(rect.size);
-        [image drawInRect:rect];
-        _image = UIGraphicsGetImageFromCurrentImageContext();
-        UIGraphicsEndImageContext();
-        _data = UIImageJPEGRepresentation(_image, 0.85f);
-    }
-
-    return self;
-}
-
-
-@end
diff --git a/ios/ReactNativeCameraKit/CKCompressedImage.swift b/ios/ReactNativeCameraKit/CKCompressedImage.swift
new file mode 100644
index 0000000000..8535f9f350
--- /dev/null
+++ b/ios/ReactNativeCameraKit/CKCompressedImage.swift
@@ -0,0 +1,51 @@
+//
+// CKCompressedImage.swift
+// ReactNativeCameraKit
+//
+
+import UIKit
+
+enum ImageQuality: String {
+    case high
+    case medium
+    case original
+
+    init(from string: String) {
+        self = ImageQuality(rawValue: string) ?? .original
+    }
+}
+
+struct CKCompressedImage {
+    let image: UIImage
+    let data: Data?
+
+    init(inputImage: UIImage, imageQuality: ImageQuality) {
+        var max: CGFloat = 1200.0
+
+        switch imageQuality {
+        case .high:
+            max = 1200.0
+        case .medium:
+            max = 800.0
+        case .original:
+            image = inputImage
+            data = inputImage.jpegData(compressionQuality: 1.0)
+            return
+        }
+
+        let actualHeight = inputImage.size.height
+        let actualWidth = inputImage.size.width
+
+        let imgRatio = actualWidth / actualHeight
+
+        let newHeight = (actualHeight > actualWidth) ? max : max / imgRatio
+        let newWidth = (actualHeight > actualWidth) ? max * imgRatio : max
+
+        let rect = CGRect(x: 0.0, y: 0.0, width: newWidth, height: newHeight)
+        UIGraphicsBeginImageContext(rect.size)
+        inputImage.draw(in: rect)
+        image = UIGraphicsGetImageFromCurrentImageContext()!
+        UIGraphicsEndImageContext()
+        self.data = image.jpegData(compressionQuality: 0.85)
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CKMockPreview.h b/ios/ReactNativeCameraKit/CKMockPreview.h
deleted file mode 100644
index c4591cb184..0000000000
--- a/ios/ReactNativeCameraKit/CKMockPreview.h
+++ /dev/null
@@ -1,19 +0,0 @@
-//
-// CKMockPreview.h
-// ReactNativeCameraKit
-//
-// Created by Aaron Grider on 10/20/20.
-//
-
-#import <UIKit/UIKit.h>
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface CKMockPreview : UIView
-
-- (UIImage *)snapshotWithTimestamp:(BOOL)showTimestamp;
-- (void)randomize;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/ios/ReactNativeCameraKit/CKMockPreview.m b/ios/ReactNativeCameraKit/CKMockPreview.m
deleted file mode 100644
index b38bb6efcf..0000000000
--- a/ios/ReactNativeCameraKit/CKMockPreview.m
+++ /dev/null
@@ -1,135 +0,0 @@
-//
-// CKMockPreview.m
-// ReactNativeCameraKit
-//
-// Created by Aaron Grider on 10/20/20.
-// - -#import "CKMockPreview.h" - -@implementation CKMockPreview - -- (id)initWithFrame:(CGRect) frame { - self = [super initWithFrame:frame]; - if (self) { - [self commonInit]; - } - return self; -} - -- (void)commonInit { - self.layer.cornerRadius = 10.0f; - self.layer.masksToBounds = YES; -} - -- (void)layoutSubviews { - [super layoutSubviews]; - [self randomize]; -} - -- (void)randomize { - self.layer.backgroundColor = [UIColor colorWithHue:drand48() saturation:1.0 brightness:1.0 alpha:1.0].CGColor; - self.layer.sublayers = nil; - - for (int i = 0; i < 5; i++) { - [self drawBalloon]; - } -} - -- (void)drawBalloon { - int stringLength = 200; - CGFloat radius = [CKMockPreview randomNumberBetween:50 maxNumber:150]; - int x = arc4random_uniform(self.frame.size.width); - int y = arc4random_uniform(self.frame.size.height + radius + stringLength); - int stretch = radius / 3; - - CALayer *balloon = [CALayer layer]; - balloon.frame = CGRectMake(x - radius, y - radius, radius * 2, radius * 2 + stringLength); - - // Ballon main circle - CAShapeLayer *circle = [CAShapeLayer layer]; - double colorHue = drand48(); - - [circle setPath:[[UIBezierPath bezierPathWithOvalInRect:CGRectMake(0, 0, radius * 2, radius * 2 + stretch)] CGPath]]; - [circle setFillColor:[[UIColor colorWithHue:colorHue saturation:1.0 brightness:0.95 alpha:1.0] CGColor]]; - - // Ballon reflection - CAShapeLayer *reflection = [CAShapeLayer layer]; - [reflection setPath:[[UIBezierPath bezierPathWithOvalInRect:CGRectMake(radius / 2, radius / 2, radius * 0.7, radius * 0.7)] CGPath]]; - [reflection setFillColor:[[UIColor colorWithHue:colorHue saturation:1.0 brightness:1.0 alpha:1.0] CGColor]]; - - // Ballon string - CAShapeLayer *line = [CAShapeLayer layer]; - UIBezierPath *linePath= [UIBezierPath bezierPath]; - CGPoint startPoint = CGPointMake(balloon.frame.size.width / 2, radius * 2); - CGPoint endPoint = CGPointMake(balloon.frame.size.width, (radius * 2) + stringLength); - [linePath moveToPoint: startPoint]; - [linePath addQuadCurveToPoint:endPoint controlPoint:CGPointMake(balloon.frame.size.width / 2, radius * 2 + stringLength / 2)]; - line.path = linePath.CGPath; - line.fillColor = nil; - line.strokeColor = [UIColor darkGrayColor].CGColor; - line.opacity = 1.0; - line.lineWidth = radius * 0.05; - - // Add layers - [balloon addSublayer:line]; - [circle addSublayer:reflection]; - [balloon addSublayer:circle]; - - [self.layer addSublayer:balloon]; - - // Apply animation - CABasicAnimation *scale = [CABasicAnimation animationWithKeyPath:@"transform.scale"]; - [scale setFromValue:[NSNumber numberWithFloat:0.7f]]; - [scale setToValue:[NSNumber numberWithFloat:1.0f]]; - [scale setDuration:10.0f]; - [scale setFillMode:kCAFillModeForwards]; - - scale.removedOnCompletion = NO; - scale.autoreverses= YES; - scale.repeatCount = HUGE_VALF; - - CABasicAnimation *move = [CABasicAnimation animationWithKeyPath:@"position.y"]; - [move setFromValue:[NSNumber numberWithFloat:balloon.frame.origin.y]]; - [move setToValue:[NSNumber numberWithFloat: 0 - balloon.frame.size.height]]; - [move setDuration:[CKMockPreview randomNumberBetween:30 maxNumber:100]]; - - move.removedOnCompletion = NO; - move.repeatCount = HUGE_VALF; - - [balloon addAnimation:scale forKey:@"scale"]; - [balloon addAnimation:move forKey:@"move"]; -} - - -- (UIImage *)snapshotWithTimestamp:(BOOL)showTimestamp { - UIGraphicsBeginImageContextWithOptions(self.bounds.size, NO, 0); - [self drawViewHierarchyInRect:self.bounds afterScreenUpdates:NO]; - UIImage *image = 
UIGraphicsGetImageFromCurrentImageContext(); - - if (showTimestamp) { - NSDate *date = [NSDate date]; - NSDateFormatter *dateformatter = [[NSDateFormatter alloc] init]; - [dateformatter setDateFormat:@"HH:mm:ss"]; - NSString *stringFromDate = [dateformatter stringFromDate:date]; - UIFont *font = [UIFont boldSystemFontOfSize:20]; - - [image drawInRect:CGRectMake(0,0,image.size.width,image.size.height)]; - CGRect rect = CGRectMake(25, 25, image.size.width, image.size.height); - [[UIColor whiteColor] set]; - [stringFromDate drawInRect:CGRectIntegral(rect) withAttributes:[NSDictionary dictionaryWithObject:font forKey:NSFontAttributeName]]; - - image = UIGraphicsGetImageFromCurrentImageContext(); - - UIGraphicsEndImageContext(); - } - - return image; -} - -+ (NSInteger)randomNumberBetween:(NSInteger)min maxNumber:(NSInteger)max -{ - return min + arc4random_uniform((uint32_t)(max - min + 1)); -} - -@end diff --git a/ios/ReactNativeCameraKit/CKMockPreview.swift b/ios/ReactNativeCameraKit/CKMockPreview.swift new file mode 100644 index 0000000000..627d955a77 --- /dev/null +++ b/ios/ReactNativeCameraKit/CKMockPreview.swift @@ -0,0 +1,132 @@ +// +// CKMockPreview.swift +// ReactNativeCameraKit +// + +import UIKit + +@objc(CKMockPreview) +public class CKMockPreview: UIView { + // MARK: - Public + + @objc + public override init(frame: CGRect) { + super.init(frame: frame) + + layer.cornerRadius = 10.0 + layer.masksToBounds = true + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override public func layoutSubviews() { + super.layoutSubviews() + + randomize() + } + + @objc + public func snapshotWithTimestamp(_ showTimestamp: Bool) -> UIImage? { + UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0) + drawHierarchy(in: bounds, afterScreenUpdates: false) + var image = UIGraphicsGetImageFromCurrentImageContext() + + if showTimestamp { + let date = Date() + let dateFormatter = DateFormatter() + dateFormatter.dateFormat = "HH:mm:ss" + let stringFromDate = dateFormatter.string(from: date) + let font = UIFont.boldSystemFont(ofSize: 20) + + image?.draw(in: CGRect(x: 0, y: 0, width: image?.size.width ?? 0, height: image?.size.height ?? 0)) + let rect = CGRect(x: 25, y: 25, width: image?.size.width ?? 0, height: image?.size.height ?? 
0)
+            UIColor.white.set()
+            let textFontAttributes = [NSAttributedString.Key.font: font]
+            stringFromDate.draw(in: rect.integral, withAttributes: textFontAttributes)
+
+            image = UIGraphicsGetImageFromCurrentImageContext()
+        }
+
+        UIGraphicsEndImageContext()
+        return image
+    }
+
+    @objc
+    public func randomize() {
+        layer.backgroundColor = UIColor(hue: CGFloat(Double.random(in: 0...1)), saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor
+        layer.sublayers = nil
+
+        for _ in 0..<5 {
+            drawBalloon()
+        }
+    }
+
+    // MARK: - Private
+
+    private func drawBalloon() {
+        let stringLength = CGFloat(200)
+        let radius = CGFloat(Int.random(in: 50...150))
+
+        let x = CGFloat(Int.random(in: 0...Int(frame.size.width)))
+        let y = CGFloat(Int.random(in: 0...Int(frame.size.height + radius + stringLength)))
+        let stretch = radius / 3
+
+        let balloon = CALayer()
+        balloon.frame = CGRect(x: x - radius, y: y - radius, width: radius * 2, height: radius * 2 + stringLength)
+
+        // Balloon main circle
+        let circle = CAShapeLayer()
+        let colorHue = Double.random(in: 0...1)
+
+        circle.path = UIBezierPath(ovalIn: CGRect(x: 0, y: 0, width: radius * 2, height: radius * 2 + stretch)).cgPath
+        circle.fillColor = UIColor(hue: colorHue, saturation: 1.0, brightness: 0.95, alpha: 1.0).cgColor
+
+        // Balloon reflection
+        let reflection = CAShapeLayer()
+        reflection.path = UIBezierPath(ovalIn: CGRect(x: radius / 2, y: radius / 2, width: radius * 0.7, height: radius * 0.7)).cgPath
+        reflection.fillColor = UIColor(hue: colorHue, saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor
+
+        // Balloon string
+        let line = CAShapeLayer()
+        let linePath = UIBezierPath()
+        let startPoint = CGPoint(x: balloon.frame.size.width / 2, y: radius * 2)
+        let endPoint = CGPoint(x: balloon.frame.size.width, y: (radius * 2) + stringLength)
+        linePath.move(to: startPoint)
+        linePath.addQuadCurve(to: endPoint, controlPoint: CGPoint(x: balloon.frame.size.width / 2, y: radius * 2 + stringLength / 2))
+        line.path = linePath.cgPath
+        line.fillColor = nil
+        line.strokeColor = UIColor.darkGray.cgColor
+        line.opacity = 1.0
+        line.lineWidth = radius * 0.05
+
+        // Add layers
+        balloon.addSublayer(line)
+        circle.addSublayer(reflection)
+        balloon.addSublayer(circle)
+
+        layer.addSublayer(balloon)
+
+        // Apply animation
+        let scale = CABasicAnimation(keyPath: "transform.scale")
+        scale.fromValue = NSNumber(value: 0.7)
+        scale.toValue = NSNumber(value: 1.0)
+        scale.duration = 10.0
+        scale.fillMode = .forwards
+        scale.isRemovedOnCompletion = false
+        scale.autoreverses = true
+        scale.repeatCount = .greatestFiniteMagnitude
+
+        let move = CABasicAnimation(keyPath: "position.y")
+        move.fromValue = NSNumber(value: balloon.frame.origin.y)
+        move.toValue = NSNumber(value: 0 - balloon.frame.size.height)
+        move.duration = Double.random(in: 30...100)
+        move.isRemovedOnCompletion = false
+        move.repeatCount = .greatestFiniteMagnitude
+
+        balloon.add(scale, forKey: "scale")
+        balloon.add(move, forKey: "move")
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CKOverlayObject.h b/ios/ReactNativeCameraKit/CKOverlayObject.h
deleted file mode 100644
index e1f6bf9c6d..0000000000
--- a/ios/ReactNativeCameraKit/CKOverlayObject.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import <Foundation/Foundation.h>
-
-@interface CKOverlayObject : NSObject
-
-
-@property (nonatomic, readonly) float width;
-@property (nonatomic, readonly) float height;
-@property (nonatomic, readonly) float ratio;
-
--(instancetype)initWithString:(NSString*)str;
-
-
-@end
diff --git a/ios/ReactNativeCameraKit/CKOverlayObject.m 
b/ios/ReactNativeCameraKit/CKOverlayObject.m deleted file mode 100644 index 102db93eae..0000000000 --- a/ios/ReactNativeCameraKit/CKOverlayObject.m +++ /dev/null @@ -1,44 +0,0 @@ -#import "CKOverlayObject.h" - -@interface CKOverlayObject () - -@property (nonatomic, readwrite) float width; -@property (nonatomic, readwrite) float height; -@property (nonatomic, readwrite) float ratio; - -@end - -@implementation CKOverlayObject - --(instancetype)initWithString:(NSString*)str { - - self = [super init]; - - if (self) { - [self commonInit:str]; - } - - return self; -} - --(void)commonInit:(NSString*)str { - - NSArray *array = [str componentsSeparatedByString:@":"]; - if (array.count == 2) { - float height = [array[0] floatValue]; - float width = [array[1] floatValue]; - - if (width != 0 && height != 0) { - self.width = width; - self.height = height; - self.ratio = self.width/self.height; - } - } -} - --(NSString *)description { - return [NSString stringWithFormat:@"width:%f height:%f ratio:%f", self.width, self.height, self.ratio]; -} - - -@end diff --git a/ios/ReactNativeCameraKit/CKOverlayObject.swift b/ios/ReactNativeCameraKit/CKOverlayObject.swift new file mode 100644 index 0000000000..008d160175 --- /dev/null +++ b/ios/ReactNativeCameraKit/CKOverlayObject.swift @@ -0,0 +1,36 @@ +// +// CKOverlayObject.swift +// ReactNativeCameraKit +// + +import Foundation + +struct CKOverlayObject: CustomStringConvertible { + let width: Float + let height: Float + let ratio: Float + + init(from inputString: String) { + let values = inputString.split(separator: ":") + + if values.count == 2, + let inputHeight = Float(values[0]), + let inputWidth = Float(values[1]), + inputHeight != 0, + inputWidth != 0 { + height = inputHeight + width = inputWidth + ratio = width / height + } else { + height = 0 + width = 0 + ratio = 0 + } + } + + // MARK: CustomStringConvertible + + var description: String { + return "height:\(height) width:\(width) ratio:\(ratio)" + } +} diff --git a/ios/ReactNativeCameraKit/CKRatioOverlayView.swift b/ios/ReactNativeCameraKit/CKRatioOverlayView.swift new file mode 100644 index 0000000000..a12a67878b --- /dev/null +++ b/ios/ReactNativeCameraKit/CKRatioOverlayView.swift @@ -0,0 +1,121 @@ +// +// CKRatioOverlayView.swift +// ReactNativeCameraKit +// + +import UIKit + +@objc(CKRatioOverlayView) +public class CKRatioOverlayView: UIView { + private var overlayObject: CKOverlayObject + + private let topView: UIView = UIView() + private let centerView: UIView = UIView() + private let bottomView: UIView = UIView() + + // MARK: - Public + + @objc(initWithFrame:ratioString:overlayColor:) + public init(frame: CGRect, ratioString: String, overlayColor: UIColor?) { + overlayObject = CKOverlayObject(from: ratioString) + + let color = overlayColor ?? 
UIColor.black.withAlphaComponent(0.3)
+        topView.backgroundColor = color
+        bottomView.backgroundColor = color
+
+        super.init(frame: frame)
+
+        addSubview(topView)
+        addSubview(centerView)
+        addSubview(bottomView)
+
+        setOverlayParts()
+    }
+
+    @available(*, unavailable)
+    required init?(coder aDecoder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+
+    @objc public func setRatio(_ ratioString: String) {
+        overlayObject = CKOverlayObject(from: ratioString)
+
+        UIView.animate(withDuration: 0.2) {
+            self.setOverlayParts()
+        }
+    }
+
+    // MARK: - Private
+
+    private func setOverlayParts() {
+        guard overlayObject.ratio != 0 else {
+            isHidden = true
+
+            return
+        }
+
+        isHidden = false
+
+        var centerSize = CGSize.zero
+        var sideSize = CGSize.zero
+
+        if overlayObject.width < overlayObject.height {
+            centerSize.width = frame.size.width
+            centerSize.height = frame.size.height * CGFloat(overlayObject.ratio)
+
+            sideSize.width = centerSize.width
+            sideSize.height = (frame.size.height - centerSize.height) / 2.0
+
+            topView.frame = CGRect(x: 0,
+                                   y: 0,
+                                   width: sideSize.width,
+                                   height: sideSize.height)
+            centerView.frame = CGRect(x: 0,
+                                      y: topView.frame.size.height + topView.frame.origin.y,
+                                      width: centerSize.width,
+                                      height: centerSize.height)
+            bottomView.frame = CGRect(x: 0,
+                                      y: centerView.frame.size.height + centerView.frame.origin.y,
+                                      width: sideSize.width,
+                                      height: sideSize.height)
+        } else if overlayObject.width > overlayObject.height {
+            centerSize.width = frame.size.width / CGFloat(overlayObject.ratio)
+            centerSize.height = frame.size.height
+
+            sideSize.width = (frame.size.width - centerSize.width) / 2.0
+            sideSize.height = centerSize.height
+
+            topView.frame = CGRect(x: 0,
+                                   y: 0,
+                                   width: sideSize.width,
+                                   height: sideSize.height)
+            centerView.frame = CGRect(x: topView.frame.size.width + topView.frame.origin.x,
+                                      y: 0,
+                                      width: centerSize.width,
+                                      height: centerSize.height)
+            bottomView.frame = CGRect(x: centerView.frame.size.width + centerView.frame.origin.x,
+                                      y: 0,
+                                      width: sideSize.width,
+                                      height: sideSize.height)
+        } else { // ratio is 1:1
+            centerSize.width = frame.size.width
+            centerSize.height = frame.size.width
+
+            sideSize.width = centerSize.width
+            sideSize.height = (frame.size.height - centerSize.height) / 2.0
+
+            topView.frame = CGRect(x: 0,
+                                   y: 0,
+                                   width: sideSize.width,
+                                   height: sideSize.height)
+            centerView.frame = CGRect(x: 0,
+                                      y: topView.frame.size.height + topView.frame.origin.y,
+                                      width: centerSize.width,
+                                      height: centerSize.height)
+            bottomView.frame = CGRect(x: 0,
+                                      y: centerView.frame.size.height + centerView.frame.origin.y,
+                                      width: sideSize.width,
+                                      height: sideSize.height)
+        }
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m b/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m
new file mode 100644
index 0000000000..6fb5bb447a
--- /dev/null
+++ b/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m
@@ -0,0 +1,44 @@
+//
+// CKTypes+RCTConvert.m
+// ReactNativeCameraKit
+//
+
+#if __has_include(<React/RCTViewManager.h>)
+#import <React/RCTViewManager.h>
+#import <React/RCTConvert.h>
+#else
+#import "RCTViewManager.h"
+#import "RCTConvert.h"
+#endif
+
+#import "ReactNativeCameraKit-Swift.h"
+
+@implementation RCTConvert (CKTypes)
+
+RCT_ENUM_CONVERTER(CKCameraType, (@{
+    @"back": @(CKCameraTypeBack),
+    @"front": @(CKCameraTypeFront)
+}), CKCameraTypeBack, integerValue)
+
+RCT_ENUM_CONVERTER(CKCameraFlashMode, (@{
+    @"on": @(CKCameraFlashModeOn),
+    @"off": @(CKCameraFlashModeOff),
+    @"auto": @(CKCameraFlashModeAuto)
+}), CKCameraFlashModeAuto, integerValue)
+
+RCT_ENUM_CONVERTER(CKCameraTorchMode, (@{ 
+ @"on": @(CKCameraTorchModeOn), + @"off": @(CKCameraTorchModeOff) +}), CKCameraTorchModeOn, integerValue) + +RCT_ENUM_CONVERTER(CKCameraFocusMode, (@{ + @"on": @(CKCameraFocusModeOn), + @"off": @(CKCameraFocusModeOff) +}), CKCameraFocusModeOn, integerValue) + +RCT_ENUM_CONVERTER(CKCameraZoomMode, (@{ + @"on": @(CKCameraZoomModeOn), + @"off": @(CKCameraZoomModeOff) +}), CKCameraZoomModeOn, integerValue) + +@end diff --git a/ios/ReactNativeCameraKit/CKTypes.swift b/ios/ReactNativeCameraKit/CKTypes.swift new file mode 100644 index 0000000000..dc2519e5f0 --- /dev/null +++ b/ios/ReactNativeCameraKit/CKTypes.swift @@ -0,0 +1,85 @@ +// +// CKTypes.swift +// ReactNativeCameraKit +// + +import AVFoundation +import Foundation + +// Dummy class used for RCTConvert +@objc class CKTypes: NSObject {} + +@objc(CKCameraType) +public enum CameraType: Int { + case back + case front + + var avPosition: AVCaptureDevice.Position { + switch self { + case .back: return .back + case .front: return .front + } + } +} + +@objc(CKCameraFlashMode) +public enum FlashMode: Int { + case on + case off + case auto + + var avFlashMode: AVCaptureDevice.FlashMode { + switch self { + case .on: return .on + case .off: return .off + case .auto: return .auto + } + } +} + +@objc(CKCameraTorchMode) +public enum TorchMode: Int { + case on + case off + + init(from string: String) { + switch string { + case "on": self = .on + default: self = .off + } + } + + var avTorchMode: AVCaptureDevice.TorchMode { + switch self { + case .on: return .on + case .off: return .off + } + } +} + +@objc(CKCameraFocusMode) +public enum FocusMode: Int { + case on + case off +} + +@objc(CKCameraZoomMode) +public enum ZoomMode: Int { + case on + case off +} + +// Temporary method to fill gap with ObjC +@objc public class EnumHelper: NSObject { + @objc public static func cameraTypeToAVPosition(_ cameraType: CameraType) -> AVCaptureDevice.Position { + return cameraType.avPosition + } + + @objc public static func flashModeToAVFlashMode(_ flashMode: FlashMode) -> AVCaptureDevice.FlashMode { + return flashMode.avFlashMode + } + + @objc public static func torchModeToAVTorchMode(_ torchMode: TorchMode) -> AVCaptureDevice.TorchMode { + return torchMode.avTorchMode + } +} diff --git a/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h b/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h new file mode 100644 index 0000000000..20b30561f2 --- /dev/null +++ b/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h @@ -0,0 +1,15 @@ +// +// Use this file to import your target's public headers that you would like to expose to Swift. 
+//
+
+#if __has_include(<React/RCTBridgeModule.h>)
+#import <React/RCTBridgeModule.h>
+#import <React/RCTViewManager.h>
+#import <React/RCTConvert.h>
+#import <React/RCTEventEmitter.h>
+#else
+#import "RCTBridgeModule.h"
+#import "RCTViewManager.h"
+#import "RCTConvert.h"
+#import "RCTEventEmitter.h"
+#endif
diff --git a/ios/ReactNativeCameraKit/ReactNativeCameraKit.h b/ios/ReactNativeCameraKit/ReactNativeCameraKit.h
new file mode 100644
index 0000000000..65c49f3270
--- /dev/null
+++ b/ios/ReactNativeCameraKit/ReactNativeCameraKit.h
@@ -0,0 +1,10 @@
+//
+//  ReactNativeCameraKit.h
+//  ReactNativeCameraKit
+//
+
+#ifndef ReactNativeCameraKit_h
+#define ReactNativeCameraKit_h
+
+
+#endif /* ReactNativeCameraKit_h */

From bdbfac1f33ae7925e91778dd34d48ab9c64eb687 Mon Sep 17 00:00:00 2001
From: David Bertet
Date: Tue, 27 Jun 2023 21:21:18 -0700
Subject: [PATCH 02/20] Swift migration & rewrite

---
 .../project.pbxproj                           | 86 +-
 .../AVCaptureDevice+Setter.swift              | 61 +
 ios/ReactNativeCameraKit/CKCamera.h           | 26 -
 ios/ReactNativeCameraKit/CKCamera.m           | 1103 -----------------
 ios/ReactNativeCameraKit/CKCamera.swift       | 121 --
 ios/ReactNativeCameraKit/CKCameraManager.m    | 11 +-
 .../CKCompressedImage.swift                   | 51 -
 .../CKOverlayObject.swift                     | 36 -
 .../CKRatioOverlayView.swift                  | 121 --
 ios/ReactNativeCameraKit/CKTypes+RCTConvert.m | 42 +-
 ios/ReactNativeCameraKit/CKTypes.swift        | 85 --
 ...ameraManager.swift => CameraManager.swift} | 22 +-
 ios/ReactNativeCameraKit/CameraProtocol.swift | 27 +
 ios/ReactNativeCameraKit/CameraView.swift     | 321 +++++
 .../FocusInterfaceView.swift                  | 216 ++++
 .../PhotoCaptureDelegate.swift                | 48 +
 .../RatioOverlayView.swift                    | 164 +++
 .../ReactNativeCameraKit-Bridging-Header.h    | 2 +
 ios/ReactNativeCameraKit/RealCamera.swift     | 493 ++++++++
 .../RealPreviewView.swift                     | 25 +
 .../ScannerFrameView.swift                    | 99 ++
 .../ScannerInterfaceView.swift                | 88 ++
 .../SimulatorCamera.swift                     | 93 ++
 ...eview.swift => SimulatorPreviewView.swift} | 54 +-
 ios/ReactNativeCameraKit/Types.swift          | 161 +++
 25 files changed, 1924 insertions(+), 1632 deletions(-)
 create mode 100644 ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift
 delete mode 100644 ios/ReactNativeCameraKit/CKCamera.h
 delete mode 100644 ios/ReactNativeCameraKit/CKCamera.m
 delete mode 100644 ios/ReactNativeCameraKit/CKCamera.swift
 delete mode 100644 ios/ReactNativeCameraKit/CKCompressedImage.swift
 delete mode 100644 ios/ReactNativeCameraKit/CKOverlayObject.swift
 delete mode 100644 ios/ReactNativeCameraKit/CKRatioOverlayView.swift
 delete mode 100644 ios/ReactNativeCameraKit/CKTypes.swift
 rename ios/ReactNativeCameraKit/{CKCameraManager.swift => CameraManager.swift} (68%)
 create mode 100644 ios/ReactNativeCameraKit/CameraProtocol.swift
 create mode 100644 ios/ReactNativeCameraKit/CameraView.swift
 create mode 100644 ios/ReactNativeCameraKit/FocusInterfaceView.swift
 create mode 100644 ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift
 create mode 100644 ios/ReactNativeCameraKit/RatioOverlayView.swift
 create mode 100644 ios/ReactNativeCameraKit/RealCamera.swift
 create mode 100644 ios/ReactNativeCameraKit/RealPreviewView.swift
 create mode 100644 ios/ReactNativeCameraKit/ScannerFrameView.swift
 create mode 100644 ios/ReactNativeCameraKit/ScannerInterfaceView.swift
 create mode 100644 ios/ReactNativeCameraKit/SimulatorCamera.swift
 rename ios/ReactNativeCameraKit/{CKMockPreview.swift => SimulatorPreviewView.swift} (74%)
 create mode 100644 ios/ReactNativeCameraKit/Types.swift

diff --git a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
index 20970b0d4b..c070d40b28 100644
--- a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
+++ b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
@@ -8,15 +8,20 @@
 /* Begin PBXBuildFile section */
 	26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 26550AF51CFC7086007FF2DF /* CKCameraManager.m */; };
-	2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 2685AA231CFD89A300E4A446 /* CKCamera.m */; };
-	4620AA6A2A2BFDC400BC8929 /* CKOverlayObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA692A2BFDC400BC8929 /* CKOverlayObject.swift */; };
-	4620AA6C2A2C03FC00BC8929 /* CKRatioOverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6B2A2C03FC00BC8929 /* CKRatioOverlayView.swift */; };
-	4620AA6E2A2C090500BC8929 /* CKCompressedImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6D2A2C090500BC8929 /* CKCompressedImage.swift */; };
-	4620AA702A2C4A5F00BC8929 /* CKMockPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6F2A2C4A5F00BC8929 /* CKMockPreview.swift */; };
-	4620AA722A2C4FA500BC8929 /* CKCameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA712A2C4FA500BC8929 /* CKCameraManager.swift */; };
-	4620AA742A2C52C300BC8929 /* CKCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA732A2C52C300BC8929 /* CKCamera.swift */; };
+	460C0C6C2A4B52D800066334 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 460C0C6B2A4B52D800066334 /* PhotoCaptureDelegate.swift */; };
+	4620AA6C2A2C03FC00BC8929 /* RatioOverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6B2A2C03FC00BC8929 /* RatioOverlayView.swift */; };
+	4620AA702A2C4A5F00BC8929 /* SimulatorPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6F2A2C4A5F00BC8929 /* SimulatorPreviewView.swift */; };
+	4620AA722A2C4FA500BC8929 /* CameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA712A2C4FA500BC8929 /* CameraManager.swift */; };
+	4620AA742A2C52C300BC8929 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA732A2C52C300BC8929 /* CameraView.swift */; };
 	463096882A2C757F002ABA1A /* CKTypes+RCTConvert.m in Sources */ = {isa = PBXBuildFile; fileRef = 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */; };
-	4630968B2A2D5423002ABA1A /* CKTypes.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4630968A2A2D5423002ABA1A /* CKTypes.swift */; };
+	4630968B2A2D5423002ABA1A /* Types.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4630968A2A2D5423002ABA1A /* Types.swift */; };
+	46506F272A37810C0058D3F2 /* RealPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46506F262A37810B0058D3F2 /* RealPreviewView.swift */; };
+	46C558C92A4AAAD100C68BA0 /* RealCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558C82A4AAAD100C68BA0 /* RealCamera.swift */; };
+	46C558CB2A4AAB3400C68BA0 /* CameraProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */; };
+	46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */; };
+	46C558CF2A4AAD7300C68BA0 /* FocusInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */; };
+	46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */; };
+	46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */; };
 /* End PBXBuildFile section */

 /* Begin PBXCopyFilesBuildPhase section */
@@ -34,18 +39,22 @@
 /* Begin PBXFileReference section */
 	2646934E1CFB2A6B00F3A740 /* libReactNativeCameraKit.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libReactNativeCameraKit.a; sourceTree = BUILT_PRODUCTS_DIR; };
 	26550AF51CFC7086007FF2DF /* CKCameraManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCameraManager.m; sourceTree = "<group>"; };
-	2685AA221CFD89A300E4A446 /* CKCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCamera.h; sourceTree = "<group>"; };
-	2685AA231CFD89A300E4A446 /* CKCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCamera.m; sourceTree = "<group>"; };
+	460C0C6B2A4B52D800066334 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
 	4620AA682A2BFDBC00BC8929 /* ReactNativeCameraKit-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "ReactNativeCameraKit-Bridging-Header.h"; sourceTree = "<group>"; };
-	4620AA692A2BFDC400BC8929 /* CKOverlayObject.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CKOverlayObject.swift; sourceTree = "<group>"; };
-	4620AA6B2A2C03FC00BC8929 /* CKRatioOverlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKRatioOverlayView.swift; sourceTree = "<group>"; };
-	4620AA6D2A2C090500BC8929 /* CKCompressedImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKCompressedImage.swift; sourceTree = "<group>"; };
-	4620AA6F2A2C4A5F00BC8929 /* CKMockPreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKMockPreview.swift; sourceTree = "<group>"; };
-	4620AA712A2C4FA500BC8929 /* CKCameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKCameraManager.swift; sourceTree = "<group>"; };
-	4620AA732A2C52C300BC8929 /* CKCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKCamera.swift; sourceTree = "<group>"; };
+	4620AA6B2A2C03FC00BC8929 /* RatioOverlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RatioOverlayView.swift; sourceTree = "<group>"; };
+	4620AA6F2A2C4A5F00BC8929 /* SimulatorPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorPreviewView.swift; sourceTree = "<group>"; };
+	4620AA712A2C4FA500BC8929 /* CameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraManager.swift; sourceTree = "<group>"; };
+	4620AA732A2C52C300BC8929 /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
 	463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "CKTypes+RCTConvert.m"; sourceTree = "<group>"; };
 	463096892A2C7D89002ABA1A /* ReactNativeCameraKit.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ReactNativeCameraKit.h; sourceTree = "<group>"; };
-	4630968A2A2D5423002ABA1A /* CKTypes.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CKTypes.swift; sourceTree = "<group>"; };
+	4630968A2A2D5423002ABA1A /* Types.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Types.swift; sourceTree = "<group>"; };
+	46506F262A37810B0058D3F2 /* RealPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RealPreviewView.swift; sourceTree = "<group>"; };
+	46C558C82A4AAAD100C68BA0 /* RealCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RealCamera.swift; sourceTree = "<group>"; };
+	46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraProtocol.swift; sourceTree = "<group>"; };
+	46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorCamera.swift; sourceTree = "<group>"; };
+	46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FocusInterfaceView.swift; sourceTree = "<group>"; };
+	46F30C002A3A859B000597F6 /* ScannerFrameView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerFrameView.swift; sourceTree = "<group>"; };
+	46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerInterfaceView.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */

 /* Begin PBXFrameworksBuildPhase section */
@@ -79,16 +88,20 @@
 	isa = PBXGroup;
 	children = (
 		26550AF51CFC7086007FF2DF /* CKCameraManager.m */,
-		4620AA712A2C4FA500BC8929 /* CKCameraManager.swift */,
-		2685AA221CFD89A300E4A446 /* CKCamera.h */,
-		2685AA231CFD89A300E4A446 /* CKCamera.m */,
+		4620AA712A2C4FA500BC8929 /* CameraManager.swift */,
 		463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */,
-		4630968A2A2D5423002ABA1A /* CKTypes.swift */,
-		4620AA732A2C52C300BC8929 /* CKCamera.swift */,
-		4620AA6D2A2C090500BC8929 /* CKCompressedImage.swift */,
-		4620AA6B2A2C03FC00BC8929 /* CKRatioOverlayView.swift */,
-		4620AA692A2BFDC400BC8929 /* CKOverlayObject.swift */,
-		4620AA6F2A2C4A5F00BC8929 /* CKMockPreview.swift */,
+		4620AA732A2C52C300BC8929 /* CameraView.swift */,
+		46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */,
+		46C558C82A4AAAD100C68BA0 /* RealCamera.swift */,
+		46506F262A37810B0058D3F2 /* RealPreviewView.swift */,
+		460C0C6B2A4B52D800066334 /* PhotoCaptureDelegate.swift */,
+		46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */,
+		4620AA6F2A2C4A5F00BC8929 /* SimulatorPreviewView.swift */,
+		4630968A2A2D5423002ABA1A /* Types.swift */,
+		4620AA6B2A2C03FC00BC8929 /* RatioOverlayView.swift */,
+		46F30C002A3A859B000597F6 /* ScannerFrameView.swift */,
+		46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */,
+		46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */,
 		4620AA682A2BFDBC00BC8929 /* ReactNativeCameraKit-Bridging-Header.h */,
 		463096892A2C7D89002ABA1A /* ReactNativeCameraKit.h */,
 	);
@@ -153,16 +166,21 @@
 	isa = PBXSourcesBuildPhase;
 	buildActionMask = 2147483647;
 	files = (
+		46C558CF2A4AAD7300C68BA0 /* FocusInterfaceView.swift in Sources */,
 		463096882A2C757F002ABA1A /* CKTypes+RCTConvert.m in Sources */,
-		4620AA742A2C52C300BC8929 /* CKCamera.swift in Sources */,
-		4620AA702A2C4A5F00BC8929 /* CKMockPreview.swift in Sources */,
-		4630968B2A2D5423002ABA1A /* CKTypes.swift in Sources */,
-		4620AA6C2A2C03FC00BC8929 /* CKRatioOverlayView.swift in Sources */,
+		4620AA742A2C52C300BC8929 /* CameraView.swift in Sources */,
+		4620AA702A2C4A5F00BC8929 /* SimulatorPreviewView.swift in Sources */,
+		46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */,
+		46C558C92A4AAAD100C68BA0 /* RealCamera.swift in Sources */,
+		46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */,
+		46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */,
+		46506F272A37810C0058D3F2 /* RealPreviewView.swift in Sources */,
+		4630968B2A2D5423002ABA1A /* Types.swift in Sources */,
+		4620AA6C2A2C03FC00BC8929 /* RatioOverlayView.swift in Sources */,
+		460C0C6C2A4B52D800066334 /* PhotoCaptureDelegate.swift in Sources */,
 		26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */,
-		4620AA722A2C4FA500BC8929 /* CKCameraManager.swift in Sources */,
-		4620AA6E2A2C090500BC8929 /* CKCompressedImage.swift in Sources */,
-		4620AA6A2A2BFDC400BC8929 /* CKOverlayObject.swift in Sources */,
-		2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */,
+		46C558CB2A4AAB3400C68BA0 /* CameraProtocol.swift in Sources */,
+		4620AA722A2C4FA500BC8929 /* CameraManager.swift in Sources */,
 	);
 	runOnlyForDeploymentPostprocessing = 0;
 };
diff --git a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift
new file mode 100644
index 0000000000..fb3fb6c9d6
--- /dev/null
+++ b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift
@@ -0,0 +1,61 @@
+//
+//  AVCaptureDevice+Setter.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+
+extension AVCaptureDevice {
+    func setTorchMode(_ newTorchMode: AVCaptureDevice.TorchMode) {
+        if isTorchModeSupported(newTorchMode) && hasTorch {
+            do {
+                try lockForConfiguration()
+                torchMode = newTorchMode
+                unlockForConfiguration()
+            } catch {
+                print("Error setting torch mode: \(error)")
+            }
+        }
+    }
+
+    func incrementZoomFactor(_ zoomFactorIncrement: CGFloat) {
+        do {
+            try lockForConfiguration()
+
+            var zoomFactor = videoZoomFactor + zoomFactorIncrement
+            if zoomFactor > activeFormat.videoMaxZoomFactor {
+                zoomFactor = activeFormat.videoMaxZoomFactor
+            } else if zoomFactor < 1 {
+                zoomFactor = 1.0
+            }
+            videoZoomFactor = zoomFactor
+            unlockForConfiguration()
+        } catch {
+            print("Error setting zoom factor: \(error)")
+        }
+    }
+
+    func focusWithMode(_ focusMode: AVCaptureDevice.FocusMode,
+                       exposeWithMode exposureMode: AVCaptureDevice.ExposureMode,
+                       atDevicePoint point: CGPoint,
+                       isSubjectAreaChangeMonitoringEnabled: Bool) {
+        do {
+            try lockForConfiguration()
+
+            if isFocusPointOfInterestSupported && isFocusModeSupported(focusMode) {
+                focusPointOfInterest = point
+                self.focusMode = focusMode
+            }
+
+            if isExposurePointOfInterestSupported && isExposureModeSupported(exposureMode) {
+                exposurePointOfInterest = point
+                self.exposureMode = exposureMode
+            }
+
+            self.isSubjectAreaChangeMonitoringEnabled = isSubjectAreaChangeMonitoringEnabled
+            unlockForConfiguration()
+        } catch {
+            print("Error setting focus: \(error)")
+        }
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CKCamera.h b/ios/ReactNativeCameraKit/CKCamera.h
deleted file mode 100644
index d5f200901e..0000000000
--- a/ios/ReactNativeCameraKit/CKCamera.h
+++ /dev/null
@@ -1,26 +0,0 @@
-#import <UIKit/UIKit.h>
-@import AVFoundation;
-
-#if __has_include(<React/RCTConvert.h>)
-#import <React/RCTConvert.h>
-#else
-#import "RCTConvert.h"
-#endif
-
-
-typedef void (^CaptureBlock)(NSDictionary *imageObject);
-typedef void (^CallbackBlock)(BOOL success);
-
-@interface CKCamera : UIView
-
-@property (nonatomic, readonly) AVCaptureDeviceInput *videoDeviceInput;
-
-
-// api
-- (void)snapStillImage:(NSDictionary*)options success:(CaptureBlock)block onError:(void (^)(NSString*))onError;
-
-- (void)setTorchMode:(AVCaptureTorchMode)torchMode;
-
-+ (NSURL*)saveToTmpFolder:(NSData*)data;
-
-@end
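The AVCaptureDevice extension above centralizes the lockForConfiguration()/unlockForConfiguration() dance that the deleted Objective-C in CKCamera.m below performs inline for torch, zoom, and focus. A minimal sketch of a caller, not part of this patch: the handlePinch function, recognizer, and device parameter are illustrative assumptions, while the atan(velocity / 20) damping mirrors the pinchVelocityDividerFactor in the removed Objective-C zoom handler.

import UIKit
import AVFoundation

// Illustrative only — a hypothetical pinch-to-zoom handler, where
// `device` would come from the active AVCaptureDeviceInput.
func handlePinch(_ recognizer: UIPinchGestureRecognizer, device: AVCaptureDevice) {
    guard recognizer.state == .changed else { return }
    // Same damping as the removed ObjC code; the extension clamps the
    // result to [1, videoMaxZoomFactor] and handles device locking.
    device.incrementZoomFactor(atan(recognizer.velocity / 20.0))
}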
diff --git a/ios/ReactNativeCameraKit/CKCamera.m b/ios/ReactNativeCameraKit/CKCamera.m
deleted file mode 100644
index cc96cd3c02..0000000000
--- a/ios/ReactNativeCameraKit/CKCamera.m
+++ /dev/null
@@ -1,1103 +0,0 @@
-@import Foundation;
-@import Photos;
-
-#if __has_include(<React/UIView+React.h>)
-#import <React/UIView+React.h>
-#import <React/RCTConvert.h>
-#import <React/RCTViewManager.h>
-#else
-#import "UIView+React.h"
-#import "RCTConvert.h"
-#import "RCTViewManager.h"
-#endif
-
-#import "CKCamera.h"
-#import "ReactNativeCameraKit-Swift.h"
-
-AVCaptureVideoOrientation AVCaptureVideoOrientationFromInterfaceOrientation(UIInterfaceOrientation orientation){
-    if (orientation == UIInterfaceOrientationPortrait) {
-        return AVCaptureVideoOrientationPortrait;
-    } else if (orientation == UIInterfaceOrientationLandscapeLeft){
-        return AVCaptureVideoOrientationLandscapeLeft;
-    } else if (orientation == UIInterfaceOrientationLandscapeRight){
-        return AVCaptureVideoOrientationLandscapeRight;
-    } else if (orientation == UIInterfaceOrientationPortraitUpsideDown){
-        return AVCaptureVideoOrientationPortraitUpsideDown;
-    } else {
-        @throw @"unknown interface orientation";
-    }
-}
-
-static void * CapturingStillImageContext = &CapturingStillImageContext;
-static void * SessionRunningContext = &SessionRunningContext;
-
-typedef NS_ENUM( NSInteger, CKSetupResult ) {
-    CKSetupResultSuccess,
-    CKSetupResultCameraNotAuthorized,
-    CKSetupResultSessionConfigurationFailed
-};
-
-
-@interface CKCamera () <AVCaptureMetadataOutputObjectsDelegate>
-
-
-@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
-@property (nonatomic, strong) CKMockPreview *mockPreview;
-@property (nonatomic, strong) UIView *focusView;
-@property (nonatomic, strong) NSTimer *focusViewTimer;
-@property (nonatomic, strong) CKRatioOverlayView *ratioOverlayView;
-
-@property (nonatomic, strong) NSTimer *focusResetTimer;
-@property (nonatomic) BOOL startFocusResetTimerAfterFocusing;
-@property (nonatomic) NSInteger resetFocusTimeout;
-@property (nonatomic) BOOL resetFocusWhenMotionDetected;
-@property (nonatomic) BOOL tapToFocusEngaged;
-
-// session management
-@property (nonatomic) dispatch_queue_t sessionQueue;
-@property (nonatomic) AVCaptureSession *session;
-@property (nonatomic, readwrite) AVCaptureDeviceInput *videoDeviceInput;
-@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
-@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
-@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;
-@property (nonatomic, strong) NSString *codeStringValue;
-
-
-// utilities
-@property (nonatomic) CKSetupResult setupResult;
-@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning;
-@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
-
-// scanner options
-@property (nonatomic) BOOL showFrame;
-@property (nonatomic) UIView *scannerView;
-@property (nonatomic, strong) RCTDirectEventBlock onReadCode;
-@property (nonatomic) CGFloat frameOffset;
-@property (nonatomic) CGFloat frameHeight;
-@property (nonatomic, strong) UIColor *laserColor;
-@property (nonatomic, strong) UIColor *frameColor;
-@property (nonatomic) UIView * dataReadingFrame;
-
-// camera options
-@property (nonatomic) CKCameraType cameraType;
-@property (nonatomic) CKCameraFlashMode flashMode;
-@property (nonatomic) CKCameraTorchMode torchMode;
-@property (nonatomic) CKCameraFocusMode focusMode;
-@property (nonatomic) CKCameraZoomMode zoomMode;
-@property (nonatomic, strong) NSString* ratioOverlay;
-@property (nonatomic, strong) UIColor *ratioOverlayColor;
-@property (nonatomic, strong) RCTDirectEventBlock onOrientationChange;
-
-@property
(nonatomic) BOOL isAddedOberver; - -@end - -@implementation CKCamera - -#pragma mark - initializtion - -- (void)dealloc -{ - [self removeObservers]; - [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; -} - --(PHFetchOptions *)fetchOptions { - - PHFetchOptions *fetchOptions = [PHFetchOptions new]; - fetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]]; - fetchOptions.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d && creationDate <= %@",PHAssetMediaTypeImage, [NSDate date]]; - // iOS 9+ - if ([fetchOptions respondsToSelector:@selector(fetchLimit)]) { - fetchOptions.fetchLimit = 1; - } - - return fetchOptions; -} - -- (void)removeReactSubview:(UIView *)subview -{ - [subview removeFromSuperview]; - [super removeReactSubview:subview]; -} - -- (void)removeFromSuperview -{ - [[NSNotificationCenter defaultCenter] removeObserver:self name:UIDeviceOrientationDidChangeNotification object:[UIDevice currentDevice]]; - dispatch_async( self.sessionQueue, ^{ - if ( self.setupResult == CKSetupResultSuccess ) { - [self.session stopRunning]; - [self removeObservers]; - } - } ); - [super removeFromSuperview]; - -} - -- (instancetype)initWithFrame:(CGRect)frame { - self = [super initWithFrame:frame]; - - if (self){ - // Create the AVCaptureSession. - self.session = [[AVCaptureSession alloc] init]; - - // Listen to orientation changes - [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; - [[NSNotificationCenter defaultCenter] - addObserver:self selector:@selector(orientationChanged:) - name:UIDeviceOrientationDidChangeNotification - object:[UIDevice currentDevice]]; - - // Fit camera preview inside of viewport - self.session.sessionPreset = AVCaptureSessionPresetPhoto; - - // Communicate with the session and other session objects on this queue. - self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL ); - - [self handleCameraPermission]; - -#if (TARGET_IPHONE_SIMULATOR) - // Create mock camera layer. When a photo is taken, we capture this layer and save it in place of a - // hardware input. 
- self.mockPreview = [[CKMockPreview alloc] initWithFrame:CGRectZero]; - [self addSubview:self.mockPreview]; -#else - self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session]; - [self.layer addSublayer:self.previewLayer]; - self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; - [self setupCaptureSession]; -#endif - - UIView *focusView = [[UIView alloc] initWithFrame:CGRectZero]; - focusView.backgroundColor = [UIColor clearColor]; - focusView.layer.borderColor = [UIColor yellowColor].CGColor; - focusView.layer.borderWidth = 1; - focusView.hidden = YES; - self.focusView = focusView; - - [self addSubview:self.focusView]; - - // defaults - self.zoomMode = CKCameraZoomModeOn; - self.flashMode = CKCameraFlashModeAuto; - self.focusMode = CKCameraFocusModeOn; - - self.frameColor = [UIColor whiteColor]; - self.laserColor = [UIColor redColor]; - self.frameOffset = 30; - self.frameHeight = 200; - } - - return self; -} - -- (void)setCameraType:(CKCameraType)cameraType { - if (cameraType != _cameraType) { - _cameraType = cameraType; - [self changeCamera:[EnumHelper cameraTypeToAVPosition: cameraType]]; - } -} - -- (void)setFlashMode:(CKCameraFlashMode)flashMode { - if (flashMode != _flashMode) { - _flashMode = flashMode; - [CKCamera setFlashMode:[EnumHelper flashModeToAVFlashMode: flashMode] forDevice:self.videoDeviceInput.device]; - } -} - --(void)setTorchMode:(CKCameraTorchMode)torchMode { - _torchMode = torchMode; - AVCaptureTorchMode avTorchMode = [EnumHelper torchModeToAVTorchMode: torchMode]; - if (self.videoDeviceInput && [self.videoDeviceInput.device isTorchModeSupported:avTorchMode] && self.videoDeviceInput.device.hasTorch) { - NSError* err = nil; - if ( [self.videoDeviceInput.device lockForConfiguration:&err] ) { - [self.videoDeviceInput.device setTorchMode:avTorchMode]; - [self.videoDeviceInput.device unlockForConfiguration]; - } - } -} - - -+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device -{ - if (device.hasFlash && [device isFlashModeSupported:flashMode] ) { - NSError *error = nil; - if ([device lockForConfiguration:&error] ) { - device.flashMode = flashMode; - [device unlockForConfiguration]; - } else { - NSLog(@"Could not lock device for configuration: %@", error); - } - } -} - -- (void)setFocusMode:(CKCameraFocusMode)focusMode { - _focusMode = focusMode; - if (self.focusMode == CKCameraFocusModeOn) { - UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAndExposeTap:)]; - [self addGestureRecognizer:tapGesture]; - } else { - NSArray *gestures = [self gestureRecognizers]; - for (id object in gestures) { - if ([object class] == UITapGestureRecognizer.class) { - [self removeGestureRecognizer:object]; - } - } - } -} - -- (void)setZoomMode:(CKCameraZoomMode)zoomMode { - _zoomMode = zoomMode; - if (zoomMode == CKCameraZoomModeOn) { - UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(handlePinchToZoomRecognizer:)]; - [self addGestureRecognizer:pinchGesture]; - } else { - NSArray *gestures = [self gestureRecognizers]; - for (id object in gestures) { - if ([object class] == UIPinchGestureRecognizer.class) { - [self removeGestureRecognizer:object]; - } - } - } -} - -- (void)setRatio:(NSString*)ratio { - if (ratio && ![ratio isEqualToString:@""]) { - self.ratioOverlay = ratio; - } -} - -- (void)setLaserColor:(UIColor *)color { - if (color != nil) { - _laserColor = color; - } -} - -- 
(void)setFrameColor:(UIColor *)color { - if (color != nil) { - _frameColor = color; - } -} - -- (void) orientationChanged:(NSNotification *)notification -{ - if (!self.onOrientationChange) { - return; - } - - // PORTRAIT: 0, // ⬆️ - // LANDSCAPE_LEFT: 1, // ⬅️ - // PORTRAIT_UPSIDE_DOWN: 2, // ⬇️ - // LANDSCAPE_RIGHT: 3, // ➡️ - - UIDevice * device = notification.object; - UIDeviceOrientation orientation = device.orientation; - if (orientation == UIDeviceOrientationPortrait) { - self.onOrientationChange(@{@"orientation": @0}); - } else if (orientation == UIDeviceOrientationLandscapeLeft) { - self.onOrientationChange(@{@"orientation": @1}); - } else if (orientation == UIDeviceOrientationPortraitUpsideDown) { - self.onOrientationChange(@{@"orientation": @2}); - } else if (orientation == UIDeviceOrientationLandscapeRight) { - self.onOrientationChange(@{@"orientation": @3}); - } -} - -- (void) setupCaptureSession { - // Setup the capture session. - // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. - // Why not do all of this on the main queue? - // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue - // so that the main queue isn't blocked, which keeps the UI responsive. - dispatch_async( self.sessionQueue, ^{ - if ( self.setupResult != CKSetupResultSuccess ) { - return; - } - - self.backgroundRecordingID = UIBackgroundTaskInvalid; - NSError *error = nil; - - AVCaptureDevice *videoDevice = [CKCamera deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack]; - AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; - - [self.session beginConfiguration]; - - if ( [self.session canAddInput:videoDeviceInput] ) { - [self.session addInput:videoDeviceInput]; - self.videoDeviceInput = videoDeviceInput; - [CKCamera setFlashMode:[EnumHelper flashModeToAVFlashMode: self.flashMode] forDevice:self.videoDeviceInput.device]; - } - else { - self.setupResult = CKSetupResultSessionConfigurationFailed; - } - - AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; - if ( [self.session canAddOutput:movieFileOutput] ) { - [self.session addOutput:movieFileOutput]; - AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo]; - if ( connection.isVideoStabilizationSupported ) { - connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; - } - self.movieFileOutput = movieFileOutput; - } - else { - self.setupResult = CKSetupResultSessionConfigurationFailed; - } - - AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; - if ( [self.session canAddOutput:stillImageOutput] ) { - stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG}; - [self.session addOutput:stillImageOutput]; - self.stillImageOutput = stillImageOutput; - } - else { - self.setupResult = CKSetupResultSessionConfigurationFailed; - } - - AVCaptureMetadataOutput * output = [[AVCaptureMetadataOutput alloc] init]; - if ([self.session canAddOutput:output]) { - self.metadataOutput = output; - [self.session addOutput:self.metadataOutput]; - [self.metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()]; - [self.metadataOutput setMetadataObjectTypes:[self.metadataOutput availableMetadataObjectTypes]]; - } - - [self.session commitConfiguration]; - 
dispatch_async(dispatch_get_main_queue(), ^{ - [self setInitialPreviewLayerVideoOrientation]; - }); - } ); -} - -- (void)setInitialPreviewLayerVideoOrientation{ - UIInterfaceOrientation initialInterfaceOrientation = [UIApplication sharedApplication].statusBarOrientation; - self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationFromInterfaceOrientation(initialInterfaceOrientation); -} - --(void)handleCameraPermission { - - switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] ) - { - case AVAuthorizationStatusAuthorized: - { - // The user has previously granted access to the camera. - break; - } - case AVAuthorizationStatusNotDetermined: - { - // The user has not yet been presented with the option to grant video access. - // We suspend the session queue to delay session setup until the access request has completed to avoid - // asking the user for audio access if video access is denied. - // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup. - dispatch_suspend( self.sessionQueue ); - [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) { - if ( ! granted ) { - self.setupResult = CKSetupResultCameraNotAuthorized; - } - dispatch_resume( self.sessionQueue ); - }]; - break; - } - default: - { - // The user has previously denied access. - self.setupResult = CKSetupResultCameraNotAuthorized; - break; - } - } -} - --(void)reactSetFrame:(CGRect)frame { - [super reactSetFrame:frame]; - - self.previewLayer.frame = self.bounds; - -#if TARGET_IPHONE_SIMULATOR - self.mockPreview.frame = self.bounds; - return; -#endif - - [self setOverlayRatioView]; - - dispatch_async( self.sessionQueue, ^{ - switch ( self.setupResult ) - { - case CKSetupResultSuccess: - { - // Only setup observers and start the session running if setup succeeded. - [self addObservers]; - [self.session startRunning]; - self.sessionRunning = self.session.isRunning; - if (self.showFrame) { - dispatch_async(dispatch_get_main_queue(), ^{ - [self addFrameForScanner]; - }); - } - break; - } - case CKSetupResultCameraNotAuthorized: - { - // dispatch_async( dispatch_get_main_queue(), ^{ - // NSString *message = NSLocalizedString( @"AVCam doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" ); - // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert]; - // UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; - // [alertController addAction:cancelAction]; - // // Provide quick access to Settings. 
- // UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) { - // [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]]; - // }]; - // [alertController addAction:settingsAction]; - // [self presentViewController:alertController animated:YES completion:nil]; - // } ); - break; - } - case CKSetupResultSessionConfigurationFailed: - { - // dispatch_async( dispatch_get_main_queue(), ^{ - // NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" ); - // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert]; - // UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; - // [alertController addAction:cancelAction]; - // [self presentViewController:alertController animated:YES completion:nil]; - // } ); - break; - } - } - } ); -} - --(void)setRatioOverlay:(NSString *)ratioOverlay { - _ratioOverlay = ratioOverlay; - [self.ratioOverlayView setRatio:self.ratioOverlay]; -} - --(void)setOverlayRatioView { - if (self.ratioOverlay) { - [self.ratioOverlayView removeFromSuperview]; - self.ratioOverlayView = [[CKRatioOverlayView alloc] initWithFrame:self.bounds ratioString:self.ratioOverlay overlayColor:self.ratioOverlayColor]; - [self addSubview:self.ratioOverlayView]; - } -} - - -#pragma mark - - - -+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position { - NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType]; - AVCaptureDevice *captureDevice = devices.firstObject; - - for (AVCaptureDevice *device in devices) { - if (device.position == position) { - captureDevice = device; - break; - } - } - - return captureDevice; -} - - -#pragma mark - actions - - - -- (void)snapStillImage:(NSDictionary*)options success:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError { - - #if TARGET_IPHONE_SIMULATOR - [self capturePreviewLayer:options success:onSuccess onError:onError]; - return; - #endif - - dispatch_async( self.sessionQueue, ^{ - AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo]; - - UIImageOrientation imageOrientation = UIImageOrientationUp; - switch([UIDevice currentDevice].orientation) { - default: - case UIDeviceOrientationPortrait: - connection.videoOrientation = AVCaptureVideoOrientationPortrait; - imageOrientation = UIImageOrientationUp; - break; - case UIDeviceOrientationPortraitUpsideDown: - connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; - imageOrientation = UIImageOrientationDown; - break; - case UIDeviceOrientationLandscapeLeft: - imageOrientation = UIImageOrientationRight; - connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; - break; - case UIDeviceOrientationLandscapeRight: - connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft; - imageOrientation = UIImageOrientationRightMirrored; - break; - } - - // Capture a still image. 
- [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^( CMSampleBufferRef imageDataSampleBuffer, NSError *error ) { - if (!imageDataSampleBuffer) { - NSLog(@"Could not capture still image: %@", error); - onError(@"Could not capture still image"); - return; - } - - // The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously. - NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; - - [self writeCapturedImageData:imageData onSuccess:onSuccess onError:onError]; - [self resetFocus]; - }]; - }); -} - -- (void)capturePreviewLayer:(NSDictionary*)options success:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError -{ - dispatch_async(dispatch_get_main_queue(), ^{ - if (self.mockPreview != nil) { - UIImage *previewSnapshot = [self.mockPreview snapshotWithTimestamp:YES]; // Generate snapshot from main UI thread - dispatch_async( self.sessionQueue, ^{ // write image async - [self writeCapturedImageData:UIImageJPEGRepresentation(previewSnapshot, 0.85) onSuccess:onSuccess onError:onError]; - }); - } else { - onError(@"Simulator image could not be captured from preview layer"); - } - }); -} - -- (void)writeCapturedImageData:(NSData *)imageData onSuccess:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError { - NSMutableDictionary *imageInfoDict = [[NSMutableDictionary alloc] init]; - - NSNumber *length = [NSNumber numberWithInteger:imageData.length]; - if (length) { - imageInfoDict[@"size"] = length; - } - - NSURL *temporaryFileURL = [CKCamera saveToTmpFolder:imageData]; - if (temporaryFileURL) { - imageInfoDict[@"uri"] = temporaryFileURL.description; - imageInfoDict[@"name"] = temporaryFileURL.lastPathComponent; - } - - onSuccess(imageInfoDict); -} - -- (void)changeCamera:(AVCaptureDevicePosition)preferredPosition -{ - // Avoid chaning device inputs when camera input is denied by the user, since both front and rear vido input devices will be nil - if ( self.setupResult != CKSetupResultSuccess ) { - return; - } -#if TARGET_IPHONE_SIMULATOR - dispatch_async( dispatch_get_main_queue(), ^{ - [self.mockPreview randomize]; - }); - return; -#endif - - dispatch_async( self.sessionQueue, ^{ - AVCaptureDevice *currentVideoDevice = self.videoDeviceInput.device; - AVCaptureDevice *videoDevice = [CKCamera deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition]; - AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil]; - - [self removeObservers]; - [self.session beginConfiguration]; - - // Remove the existing device input first, since using the front and back camera simultaneously is not supported. 
- [self.session removeInput:self.videoDeviceInput]; - - if ( [self.session canAddInput:videoDeviceInput] ) { - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice]; - - [CKCamera setFlashMode:[EnumHelper flashModeToAVFlashMode: self.flashMode] forDevice:videoDevice]; - - [self.session addInput:videoDeviceInput]; - self.videoDeviceInput = videoDeviceInput; - } - else { - [self.session addInput:self.videoDeviceInput]; - } - - AVCaptureConnection *connection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo]; - if ( connection.isVideoStabilizationSupported ) { - connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; - } - - [self.session commitConfiguration]; - [self addObservers]; - } ); -} - -+(NSURL*)saveToTmpFolder:(NSData*)data { - NSString *temporaryFileName = [NSProcessInfo processInfo].globallyUniqueString; - NSString *temporaryFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[temporaryFileName stringByAppendingPathExtension:@"jpg"]]; - NSURL *temporaryFileURL = [NSURL fileURLWithPath:temporaryFilePath]; - - NSError *error = nil; - [data writeToURL:temporaryFileURL options:NSDataWritingAtomic error:&error]; - - if (error) { - NSLog(@"Error occured while writing image data to a temporary file: %@", error); - } - return temporaryFileURL; -} - - --(void) handlePinchToZoomRecognizer:(UIPinchGestureRecognizer*)pinchRecognizer { - if (pinchRecognizer.state == UIGestureRecognizerStateChanged) { - [self zoom:pinchRecognizer.velocity]; - } -} - - -- (void)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer -{ - CGPoint touchPoint = [gestureRecognizer locationInView:self]; - CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)self.previewLayer captureDevicePointOfInterestForPoint:touchPoint]; - - // Engage manual focus - [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES]; - - // Disengage manual focus once focusing finishing (if focusTimeout > 0) - // See [self observeValueForKeyPath] - self.startFocusResetTimerAfterFocusing = YES; - - self.tapToFocusEngaged = YES; - - // Animate focus rectangle - CGFloat halfDiagonal = 73; - CGFloat halfDiagonalAnimation = halfDiagonal*2; - - CGRect focusViewFrame = CGRectMake(touchPoint.x - (halfDiagonal/2), - touchPoint.y - (halfDiagonal/2), - halfDiagonal, - halfDiagonal); - - self.focusView.alpha = 0; - self.focusView.hidden = NO; - self.focusView.frame = CGRectMake(touchPoint.x - (halfDiagonalAnimation/2), - touchPoint.y - (halfDiagonalAnimation/2), - halfDiagonalAnimation, - halfDiagonalAnimation); - - [UIView animateWithDuration:0.2 animations:^{ - self.focusView.frame = focusViewFrame; - self.focusView.alpha = 1; - } completion:^(BOOL finished) { - self.focusView.alpha = 1; - self.focusView.frame = focusViewFrame; - }]; -} - -- (void)resetFocus -{ - if (self.focusResetTimer) { - [self.focusResetTimer invalidate]; - self.focusResetTimer = nil; - } - - // Resetting focus to continuous focus, so not interested in resetting anymore - self.startFocusResetTimerAfterFocusing = NO; - - // Avoid showing reset-focus animation after each photo capture - if (!self.tapToFocusEngaged) { - return; - } - - self.tapToFocusEngaged = NO; - - // 1. 
Reset actual camera focus - CGPoint deviceCenter = CGPointMake(0.5, 0.5); - [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:deviceCenter monitorSubjectAreaChange:NO]; - - // 2. Create animation to indicate the new focus location - CGPoint layerCenter = [(AVCaptureVideoPreviewLayer *)self.previewLayer pointForCaptureDevicePointOfInterest:deviceCenter]; - - CGFloat halfDiagonal = 123; - CGFloat halfDiagonalAnimation = halfDiagonal*2; - - CGRect focusViewFrame = CGRectMake(layerCenter.x - (halfDiagonal/2), layerCenter.y - (halfDiagonal/2), halfDiagonal, halfDiagonal); - CGRect focusViewFrameForAnimation = CGRectMake(layerCenter.x - (halfDiagonalAnimation/2), layerCenter.y - (halfDiagonalAnimation/2), halfDiagonalAnimation, halfDiagonalAnimation); - - self.focusView.alpha = 0; - self.focusView.hidden = NO; - self.focusView.frame = focusViewFrameForAnimation; - - [UIView animateWithDuration:0.2 animations:^{ - self.focusView.frame = focusViewFrame; - self.focusView.alpha = 1; - } completion:^(BOOL finished) { - self.focusView.alpha = 1; - self.focusView.frame = focusViewFrame; - - if (self.focusViewTimer) { - [self.focusViewTimer invalidate]; - } - self.focusViewTimer = [NSTimer scheduledTimerWithTimeInterval:2 repeats:NO block:^(NSTimer *timer) { - [UIView animateWithDuration:0.2 animations:^{ - self.focusView.alpha = 0; - } completion:^(BOOL finished) { - self.focusView.frame = CGRectZero; - self.focusView.hidden = YES; - }]; - }]; - }]; -} - -- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange -{ - dispatch_async( self.sessionQueue, ^{ - AVCaptureDevice *device = self.videoDeviceInput.device; - NSError *error = nil; - if (![device lockForConfiguration:&error]) { - NSLog(@"Unable to device.lockForConfiguration() %@", error); - return; - } - - // Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation. - // Call -set(Focus/Exposure)Mode: to apply the new point of interest. 
- if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) { - device.focusPointOfInterest = point; - device.focusMode = focusMode; - } - - if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) { - device.exposurePointOfInterest = point; - device.exposureMode = exposureMode; - } - - device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange && self.resetFocusWhenMotionDetected; - [device unlockForConfiguration]; - }); -} - -- (void)zoom:(CGFloat)velocity { - if (isnan(velocity)) { - return; - } - const CGFloat pinchVelocityDividerFactor = 20.0f; // TODO: calibrate or make this component's property - NSError *error = nil; - AVCaptureDevice *device = [[self videoDeviceInput] device]; - if ([device lockForConfiguration:&error]) { - CGFloat zoomFactor = device.videoZoomFactor + atan(velocity / pinchVelocityDividerFactor); - if (zoomFactor > device.activeFormat.videoMaxZoomFactor) { - zoomFactor = device.activeFormat.videoMaxZoomFactor; - } else if (zoomFactor < 1) { - zoomFactor = 1.0f; - } - device.videoZoomFactor = zoomFactor; - [device unlockForConfiguration]; - } else { - //NSLog(@"error: %@", error); - } -} - - -#pragma mark - Frame for Scanner Settings - -- (void)didMoveToWindow { - [super didMoveToWindow]; - if (self.sessionRunning && self.dataReadingFrame) { - dispatch_async(dispatch_get_main_queue(), ^{ - [self startAnimatingScanner:self.dataReadingFrame]; - }); - } -} - -- (void)addFrameForScanner { - CGFloat frameWidth = self.bounds.size.width - 2 * self.frameOffset; - if (!self.dataReadingFrame) { - self.dataReadingFrame = [[UIView alloc] initWithFrame:CGRectMake(0, 0, frameWidth, self.frameHeight)]; // - self.dataReadingFrame.center = self.center; - self.dataReadingFrame.backgroundColor = [UIColor clearColor]; - [self createCustomFramesForView:self.dataReadingFrame]; - [self addSubview:self.dataReadingFrame]; - - [self startAnimatingScanner:self.dataReadingFrame]; - - [self addVisualEffects:self.dataReadingFrame.frame]; - - CGRect visibleRect = [self.previewLayer metadataOutputRectOfInterestForRect:self.dataReadingFrame.frame]; - self.metadataOutput.rectOfInterest = visibleRect; - } -} - -- (void)createCustomFramesForView:(UIView *)frameView { - CGFloat cornerSize = 20.f; - CGFloat cornerWidth = 2.f; - for (int i = 0; i < 8; i++) { - CGFloat x = 0.0; - CGFloat y = 0.0; - CGFloat width = 0.0; - CGFloat height = 0.0; - switch (i) { - case 0: - x = 0; y = 0; width = cornerWidth; height = cornerSize; - break; - case 1: - x = 0; y = 0; width = cornerSize; height = cornerWidth; - break; - case 2: - x = CGRectGetWidth(frameView.bounds) - cornerSize; y = 0; width = cornerSize; height = cornerWidth; - break; - case 3: - x = CGRectGetWidth(frameView.bounds) - cornerWidth; y = 0; width = cornerWidth; height = cornerSize; - break; - case 4: - x = CGRectGetWidth(frameView.bounds) - cornerWidth; - y = CGRectGetHeight(frameView.bounds) - cornerSize; width = cornerWidth; height = cornerSize; - break; - case 5: - x = CGRectGetWidth(frameView.bounds) - cornerSize; - y = CGRectGetHeight(frameView.bounds) - cornerWidth; width = cornerSize; height = cornerWidth; - break; - case 6: - x = 0; y = CGRectGetHeight(frameView.bounds) - cornerWidth; width = cornerSize; height = cornerWidth; - break; - case 7: - x = 0; y = CGRectGetHeight(frameView.bounds) - cornerSize; width = cornerWidth; height = cornerSize; - break; - } - UIView * cornerView = [[UIView alloc] initWithFrame:CGRectMake(x, y, width, height)]; - 
cornerView.backgroundColor = self.frameColor; - [frameView addSubview:cornerView]; - } -} - -- (void)addVisualEffects:(CGRect)inputRect { - UIView *topView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, self.frame.size.width, inputRect.origin.y)]; - topView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:topView]; - - UIView *leftSideView = [[UIView alloc] initWithFrame:CGRectMake(0, inputRect.origin.y, self.frameOffset, self.frameHeight)]; //paddingForScanner scannerHeight - leftSideView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:leftSideView]; - - UIView *rightSideView = [[UIView alloc] initWithFrame:CGRectMake(inputRect.size.width + self.frameOffset, inputRect.origin.y, self.frameOffset, self.frameHeight)]; - rightSideView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:rightSideView]; - - UIView *bottomView = [[UIView alloc] initWithFrame:CGRectMake(0, inputRect.origin.y + self.frameHeight, self.frame.size.width, - self.frame.size.height - inputRect.origin.y - self.frameHeight)]; - bottomView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:bottomView]; -} - -- (void)startAnimatingScanner:(UIView *)inputView { - if (!self.scannerView) { - self.scannerView = [[UIView alloc] initWithFrame:CGRectMake(2, 0, inputView.frame.size.width - 4, 2)]; - self.scannerView.backgroundColor = self.laserColor; - } - if (self.scannerView.frame.origin.y != 0) { - [self.scannerView setFrame:CGRectMake(2, 0, inputView.frame.size.width - 4, 2)]; - } - [inputView addSubview:self.scannerView]; - [UIView animateWithDuration:3 delay:0 options:(UIViewAnimationOptionAutoreverse | UIViewAnimationOptionRepeat) animations:^{ - CGFloat middleX = inputView.frame.size.width / 2; - self.scannerView.center = CGPointMake(middleX, inputView.frame.size.height - 1); - } completion:^(BOOL finished) {}]; -} - -- (void)stopAnimatingScanner { - [self.scannerView removeFromSuperview]; -} - -//Observer actions - -- (void)didEnterBackground:(NSNotification *)notification { - [self stopAnimatingScanner]; -} - -- (void)willEnterForeground:(NSNotification *)notification { - [self startAnimatingScanner:self.dataReadingFrame]; -} - -#pragma mark - observers - -- (void)didChangeStatusBarOrientation:(NSNotification *)notification { - UIInterfaceOrientation currentInterfaceOrientation = [UIApplication sharedApplication].statusBarOrientation; - self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationFromInterfaceOrientation(currentInterfaceOrientation); -} - -- (void)addObservers -{ - - if (!self.isAddedOberver) { - [self.session addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext]; - [self.stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:CapturingStillImageContext]; - - [self.videoDeviceInput.device addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.videoDeviceInput.device]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session]; - // A 
session can only run when the app is full screen. It will be interrupted in a multi-app layout, introduced in iOS 9, - // see also the documentation of AVCaptureSessionInterruptionReason. Add observers to handle these session interruptions - // and show a preview is paused message. See the documentation of AVCaptureSessionWasInterruptedNotification for other - // interruption reasons. - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:self.session]; - //Observers for re-usage animation when app go to the background and back - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification - object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(willEnterForeground:) - name:UIApplicationWillEnterForegroundNotification - object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(didChangeStatusBarOrientation:) - name:UIApplicationDidChangeStatusBarOrientationNotification - object:nil]; - self.isAddedOberver = YES; - } -} - -//UIApplicationDidEnterBackgroundNotification NS_AVAILABLE_IOS(4_0); -//UIKIT_EXTERN NSNotificationName const UIApplicationWillEnterForegroundNotification - -- (void)sessionWasInterrupted:(NSNotification *)notification -{ - // In some scenarios we want to enable the user to resume the session running. - // For example, if music playback is initiated via control center while using AVCam, - // then the user can let AVCam resume the session running, which will stop music playback. - // Note that stopping music playback in control center will not automatically resume the session running. - // Also note that it is not always possible to resume, see -[resumeInterruptedSession:]. - BOOL showResumeButton = NO; - - // In iOS 9 and later, the userInfo dictionary contains information on why the session was interrupted. - if ( &AVCaptureSessionInterruptionReasonKey ) { - AVCaptureSessionInterruptionReason reason = [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]; - //NSLog( @"Capture session was interrupted with reason %ld", (long)reason ); - - if ( reason == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient || - reason == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient ) { - showResumeButton = YES; - } - } -} - - -- (void)removeObservers -{ - if (self.isAddedOberver) { - [[NSNotificationCenter defaultCenter] removeObserver:self]; - [self.session removeObserver:self forKeyPath:@"running" context:SessionRunningContext]; - [self.stillImageOutput removeObserver:self forKeyPath:@"capturingStillImage" context:CapturingStillImageContext]; - [self.videoDeviceInput.device removeObserver:self forKeyPath:@"adjustingFocus"]; - self.isAddedOberver = NO; - } -} - -- (void)sessionRuntimeError:(NSNotification *)notification -{ - NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; - //NSLog( @"Capture session runtime error: %@", error ); - - // Automatically try to restart the session running if media services were reset and the last start running succeeded. - // Otherwise, enable the user to try to resume the session running. 
- if ( error.code == AVErrorMediaServicesWereReset ) { - dispatch_async( self.sessionQueue, ^{ - if ( self.isSessionRunning ) { - [self.session startRunning]; - self.sessionRunning = self.session.isRunning; - } - else { - } - } ); - } -} - - -- (void)subjectAreaDidChange:(NSNotification *)notification -{ - [self resetFocus]; -} - -- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context -{ - if (context == CapturingStillImageContext) - { - // Flash/dim preview to indicate shutter action - BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue]; - if ( isCapturingStillImage ) - { - dispatch_async(dispatch_get_main_queue(), ^{ - self.alpha = 0.0; - [UIView animateWithDuration:0.35 animations:^{ - self.alpha = 1.0; - }]; - }); - } - } - else if ([keyPath isEqualToString:@"adjustingFocus"]) - { - // Note: oldKey is not available (value is always NO it seems) so we only check on newKey - BOOL isFocusing = [change[NSKeyValueChangeNewKey] boolValue]; - if (self.startFocusResetTimerAfterFocusing == YES && !isFocusing && self.resetFocusTimeout > 0) - { - self.startFocusResetTimerAfterFocusing = NO; - - // Disengage manual focus after focusTimeout milliseconds - NSTimeInterval focusTimeoutSeconds = self.resetFocusTimeout / 1000; - self.focusResetTimer = [NSTimer scheduledTimerWithTimeInterval:focusTimeoutSeconds repeats:NO block:^(NSTimer *timer) { - [self resetFocus]; - }]; - } - } - else if (context == SessionRunningContext) - { -// BOOL isSessionRunning = [change[NSKeyValueChangeNewKey] boolValue]; -// -// dispatch_async( dispatch_get_main_queue(), ^{ -// // Only enable the ability to change camera if the device has more than one camera. -// self.cameraButton.enabled = isSessionRunning && ( [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1 ); -// self.recordButton.enabled = isSessionRunning; -// self.stillButton.enabled = isSessionRunning; -// } ); - } - else - { - [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; - } -} - -#pragma mark - AVCaptureMetadataOutputObjectsDelegate - -- (void)captureOutput:(AVCaptureOutput *)output -didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects - fromConnection:(AVCaptureConnection *)connection { - - for(AVMetadataObject *metadataObject in metadataObjects) - { - if ([metadataObject isKindOfClass:[AVMetadataMachineReadableCodeObject class]] && [self isSupportedBarCodeType:metadataObject.type]) { - - AVMetadataMachineReadableCodeObject *code = (AVMetadataMachineReadableCodeObject*)[self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject]; - if (self.onReadCode && code.stringValue && ![code.stringValue isEqualToString:self.codeStringValue]) { - self.onReadCode(@{@"codeStringValue": code.stringValue}); - [self stopAnimatingScanner]; - } - } - } -} - -- (BOOL)isSupportedBarCodeType:(NSString *)currentType { - BOOL result = NO; - NSArray *supportedBarcodeTypes = @[AVMetadataObjectTypeUPCECode,AVMetadataObjectTypeCode39Code,AVMetadataObjectTypeCode39Mod43Code, - AVMetadataObjectTypeEAN13Code,AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, - AVMetadataObjectTypeCode128Code, AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, - AVMetadataObjectTypeAztecCode, AVMetadataObjectTypeDataMatrixCode, AVMetadataObjectTypeInterleaved2of5Code]; - for (NSString* object in supportedBarcodeTypes) { - if ([currentType isEqualToString:object]) { - result = YES; - } - } - return 
result; -} - -#pragma mark - String Constants For Scanner - -const NSString *offsetForScannerFrame = @"offsetFrame"; -const NSString *heightForScannerFrame = @"frameHeight"; -const NSString *colorForFrame = @"colorForFrame"; -const NSString *isNeedMultipleScanBarcode = @"isNeedMultipleScanBarcode"; - - -@end - diff --git a/ios/ReactNativeCameraKit/CKCamera.swift b/ios/ReactNativeCameraKit/CKCamera.swift deleted file mode 100644 index baa6064a40..0000000000 --- a/ios/ReactNativeCameraKit/CKCamera.swift +++ /dev/null @@ -1,121 +0,0 @@ -// -// CKCamera.swift -// ReactNativeCameraKit -// - -import AVFoundation -import UIKit - -class CKCamera1: UIView, AVCaptureMetadataOutputObjectsDelegate { - let label = UILabel() - - private var cameraType: CameraType? - private var flashMode: FlashMode? - private var torchMode: TorchMode? - private var focusMode: FocusMode? - private var zoomMode: ZoomMode? - private var ratioOverlay: String? - private var ratioOverlayColor: UIColor? - - // Barcode - private var onReadCode: RCTDirectEventBlock? - private var showFrame: Bool? - private var laserColor: UIColor? - private var frameColor: UIColor? - - private var onOrientationChange: RCTDirectEventBlock? - - private var resetFocusTimeout: Int? - private var resetFocusWhenMotionDetected: Bool? - - override init(frame: CGRect) { - super.init(frame: frame) - - label.text = "Hello world" - label.frame = CGRect(x: 0, y: 0, width: 100, height: 100) - addSubview(label) - - backgroundColor = .red - } - - @available(*, unavailable) - required init?(coder aDecoder: NSCoder) { - fatalError("init(coder:) has not been implemented") - } - - func snapStillImage(_ options: [String: Any], success: (_ imageObject: [String: Any]) -> (), onError:(_ error: String) -> ()) { - success(["uri":"SUCCESS!", "name":"OHOH!"]) - } - - // MARK: AVCaptureMetadataOutputObjectsDelegate - - func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) { - - } - - // MARK: Objective C setter - - @objc func setCameraType(_ cameraType: CameraType) { - self.cameraType = cameraType - } - - @objc func setFlashMode(_ flashMode: FlashMode) { - onReadCode?(["codeStringValue":"SUCCESS!", "name":"OHOH!"]) - - self.flashMode = flashMode - } - - @objc func setTorchMode(_ torchMode: TorchMode) { - if torchMode == .off { - backgroundColor = .red - } else { - backgroundColor = .green - } - - self.torchMode = torchMode - } - - @objc func setFocusMode(_ focusMode: FocusMode) { - self.focusMode = focusMode - } - - @objc func setZoomMode(_ zoomMode: ZoomMode) { - self.zoomMode = zoomMode - } - - @objc func setRatioOverlayColor(_ ratioOverlayColor: UIColor) { - self.ratioOverlayColor = ratioOverlayColor - } - - @objc func setRatioOverlay(_ ratioOverlay: String) { - self.ratioOverlay = ratioOverlay - } - - @objc func setOnReadCode(_ onReadCode: @escaping RCTDirectEventBlock) { - self.onReadCode = onReadCode - } - - @objc func setShowFrame(_ showFrame: Bool) { - self.showFrame = showFrame - } - - @objc func setLaserColor(_ laserColor: UIColor) { - self.laserColor = laserColor - } - - @objc func setFrameColor(_ frameColor: UIColor) { - self.frameColor = frameColor - } - - @objc func setOnOrientationChange(_ onOrientationChange: @escaping RCTDirectEventBlock) { - self.onOrientationChange = onOrientationChange - } - - @objc func setResetFocusTimeout(_ resetFocusTimeout: Int) { - self.resetFocusTimeout = resetFocusTimeout - } - - @objc func setResetFocusWhenMotionDetected(_ 
resetFocusWhenMotionDetected: Bool) { - self.resetFocusWhenMotionDetected = resetFocusWhenMotionDetected - } -} diff --git a/ios/ReactNativeCameraKit/CKCameraManager.m b/ios/ReactNativeCameraKit/CKCameraManager.m index 9bc98a392f..499902995a 100644 --- a/ios/ReactNativeCameraKit/CKCameraManager.m +++ b/ios/ReactNativeCameraKit/CKCameraManager.m @@ -16,13 +16,14 @@ @interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(cameraType, CKCameraType) -RCT_EXPORT_VIEW_PROPERTY(flashMode, CKCameraFlashMode) -RCT_EXPORT_VIEW_PROPERTY(torchMode, CKCameraTorchMode) -RCT_EXPORT_VIEW_PROPERTY(focusMode, CKCameraFocusMode) -RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKCameraZoomMode) +RCT_EXPORT_VIEW_PROPERTY(flashMode, CKFlashMode) +RCT_EXPORT_VIEW_PROPERTY(torchMode, CKTorchMode) +RCT_EXPORT_VIEW_PROPERTY(focusMode, CKFocusMode) +RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKZoomMode) RCT_EXPORT_VIEW_PROPERTY(ratioOverlay, NSString) RCT_EXPORT_VIEW_PROPERTY(ratioOverlayColor, UIColor) +RCT_EXPORT_VIEW_PROPERTY(scanBarcode, BOOL) RCT_EXPORT_VIEW_PROPERTY(onReadCode, RCTDirectEventBlock) RCT_EXPORT_VIEW_PROPERTY(onOrientationChange, RCTDirectEventBlock) RCT_EXPORT_VIEW_PROPERTY(showFrame, BOOL) @@ -35,8 +36,6 @@ @interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager) resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject) -RCT_EXTERN_METHOD(setTorchMode:(NSString*)mode) - RCT_EXTERN_METHOD(checkDeviceCameraAuthorizationStatus:(RCTPromiseResolveBlock)resolve reject:(__unused RCTPromiseRejectBlock)reject) diff --git a/ios/ReactNativeCameraKit/CKCompressedImage.swift b/ios/ReactNativeCameraKit/CKCompressedImage.swift deleted file mode 100644 index 8535f9f350..0000000000 --- a/ios/ReactNativeCameraKit/CKCompressedImage.swift +++ /dev/null @@ -1,51 +0,0 @@ -// -// CKCompressedImage.swift -// ReactNativeCameraKit -// - -import UIKit - -enum ImageQuality: String { - case high - case medium - case original - - init(from string: String) { - self = ImageQuality(rawValue: string) ?? .original - } -} - -struct CKCompressedImage { - let image: UIImage - let data: Data? - - init(inputImage: UIImage, imageQuality: ImageQuality) { - var max: CGFloat = 1200.0 - - switch imageQuality { - case .high: - max = 1200.0 - case .medium: - max = 800.0 - case .original: - image = inputImage - data = inputImage.jpegData(compressionQuality: 1.0) - return - } - - let actualHeight = inputImage.size.height - let actualWidth = inputImage.size.width - - let imgRatio = actualWidth / actualHeight - - let newHeight = (actualHeight > actualWidth) ? max : max / imgRatio - let newWidth = (actualHeight > actualWidth) ? max * imgRatio : max - - let rect = CGRect(x: 0.0, y: 0.0, width: newWidth, height: newHeight) - UIGraphicsBeginImageContext(rect.size) - inputImage.draw(in: rect) - image = UIGraphicsGetImageFromCurrentImageContext()! 
- UIGraphicsEndImageContext() - self.data = image.jpegData(compressionQuality: 0.85) - } -} diff --git a/ios/ReactNativeCameraKit/CKOverlayObject.swift b/ios/ReactNativeCameraKit/CKOverlayObject.swift deleted file mode 100644 index 008d160175..0000000000 --- a/ios/ReactNativeCameraKit/CKOverlayObject.swift +++ /dev/null @@ -1,36 +0,0 @@ -// -// CKOverlayObject.swift -// ReactNativeCameraKit -// - -import Foundation - -struct CKOverlayObject: CustomStringConvertible { - let width: Float - let height: Float - let ratio: Float - - init(from inputString: String) { - let values = inputString.split(separator: ":") - - if values.count == 2, - let inputHeight = Float(values[0]), - let inputWidth = Float(values[1]), - inputHeight != 0, - inputWidth != 0 { - height = inputHeight - width = inputWidth - ratio = width / height - } else { - height = 0 - width = 0 - ratio = 0 - } - } - - // MARK: CustomStringConvertible - - var description: String { - return "height:\(height) width:\(width) ratio:\(ratio)" - } -} diff --git a/ios/ReactNativeCameraKit/CKRatioOverlayView.swift b/ios/ReactNativeCameraKit/CKRatioOverlayView.swift deleted file mode 100644 index a12a67878b..0000000000 --- a/ios/ReactNativeCameraKit/CKRatioOverlayView.swift +++ /dev/null @@ -1,121 +0,0 @@ -// -// CKRatioOverlayView.swift -// ReactNativeCameraKit -// - -import UIKit - -@objc(CKRatioOverlayView) -public class CKRatioOverlayView: UIView { - private var overlayObject: CKOverlayObject - - private let topView: UIView = UIView() - private let centerView: UIView = UIView() - private let bottomView: UIView = UIView() - - // MARK: - Public - - @objc(initWithFrame:ratioString:overlayColor:) - public init(frame: CGRect, ratioString: String, overlayColor: UIColor?) { - overlayObject = CKOverlayObject(from: ratioString) - - let color = overlayColor ?? 
UIColor.black.withAlphaComponent(0.3) - topView.backgroundColor = color - bottomView.backgroundColor = color - - super.init(frame: frame) - - addSubview(topView) - addSubview(centerView) - addSubview(bottomView) - - setOverlayParts() - } - - @available(*, unavailable) - required init?(coder aDecoder: NSCoder) { - fatalError("init(coder:) has not been implemented") - } - - @objc public func setRatio(_ ratioString: String) { - overlayObject = CKOverlayObject(from: ratioString) - - UIView.animate(withDuration: 0.2) { - self.setOverlayParts() - } - } - - // MARK: - Private - - private func setOverlayParts() { - guard overlayObject.ratio != 0 else { - isHidden = true - - return - } - - isHidden = false - - var centerSize = CGSize.zero - var sideSize = CGSize.zero - - if overlayObject.width < overlayObject.height { - centerSize.width = frame.size.width - centerSize.height = frame.size.height * CGFloat(overlayObject.ratio) - - sideSize.width = centerSize.width - sideSize.height = (frame.size.height - centerSize.height) / 2.0 - - topView.frame = CGRect(x: 0, - y: 0, - width: sideSize.width, - height: sideSize.height) - centerView.frame = CGRect(x: 0, - y: topView.frame.size.height + topView.frame.origin.y, - width: centerSize.width, - height: centerSize.height) - bottomView.frame = CGRect(x: 0, - y: centerView.frame.size.height + centerView.frame.origin.y, - width: sideSize.width, - height: sideSize.height) - } else if overlayObject.width > overlayObject.height { - centerSize.width = frame.size.width / CGFloat(overlayObject.ratio) - centerSize.height = frame.size.height - - sideSize.width = (frame.size.width - centerSize.width) / 2.0 - sideSize.height = centerSize.height - - topView.frame = CGRect(x: 0, - y: 0, - width: sideSize.width, - height: sideSize.height) - centerView.frame = CGRect(x: topView.frame.size.width + topView.frame.origin.x, - y: 0, - width: centerSize.width, - height: centerSize.height) - bottomView.frame = CGRect(x: centerView.frame.size.width + centerView.frame.origin.x, - y: 0, - width: sideSize.width, - height: sideSize.height) - } else { // ratio is 1:1 - centerSize.width = frame.size.width - centerSize.height = frame.size.width - - sideSize.width = centerSize.width - sideSize.height = (frame.size.height - centerSize.height) / 2.0 - - topView.frame = CGRect(x: 0, - y: 0, - width: sideSize.width, - height: sideSize.height) - centerView.frame = CGRect(x: 0, - y: topView.frame.size.height + topView.frame.origin.y, - width: centerSize.width, - height: centerSize.height) - bottomView.frame = CGRect(x: 0, - y: centerView.frame.size.height + centerView.frame.origin.y, - width: sideSize.width, - height: sideSize.height) - } - } -} diff --git a/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m b/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m index 6fb5bb447a..8156fa821f 100644 --- a/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m +++ b/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m @@ -1,5 +1,5 @@ // -// CKCamera+RCTConvert.m +// CKTypes+RCTConvert.m // ReactNativeCameraKit // @@ -20,25 +20,25 @@ @implementation RCTConvert (CKTypes) @"front": @(CKCameraTypeFront) }), CKCameraTypeBack, integerValue) -RCT_ENUM_CONVERTER(CKCameraFlashMode, (@{ - @"on": @(CKCameraFlashModeOn), - @"off": @(CKCameraFlashModeOff), - @"auto": @(CKCameraFlashModeAuto) -}), CKCameraFlashModeAuto, integerValue) - -RCT_ENUM_CONVERTER(CKCameraTorchMode, (@{ - @"on": @(CKCameraTorchModeOn), - @"off": @(CKCameraTorchModeOff) -}), CKCameraTorchModeOn, integerValue) - -RCT_ENUM_CONVERTER(CKCameraFocusMode, (@{ - @"on": 
@(CKCameraFocusModeOn), - @"off": @(CKCameraFocusModeOff) -}), CKCameraFocusModeOn, integerValue) - -RCT_ENUM_CONVERTER(CKCameraZoomMode, (@{ - @"on": @(CKCameraZoomModeOn), - @"off": @(CKCameraZoomModeOff) -}), CKCameraZoomModeOn, integerValue) +RCT_ENUM_CONVERTER(CKFlashMode, (@{ + @"on": @(CKFlashModeOn), + @"off": @(CKFlashModeOff), + @"auto": @(CKFlashModeAuto) +}), CKFlashModeAuto, integerValue) + +RCT_ENUM_CONVERTER(CKTorchMode, (@{ + @"on": @(CKTorchModeOn), + @"off": @(CKTorchModeOff) +}), CKTorchModeOn, integerValue) + +RCT_ENUM_CONVERTER(CKFocusMode, (@{ + @"on": @(CKFocusModeOn), + @"off": @(CKFocusModeOff) +}), CKFocusModeOn, integerValue) + +RCT_ENUM_CONVERTER(CKZoomMode, (@{ + @"on": @(CKZoomModeOn), + @"off": @(CKZoomModeOff) +}), CKZoomModeOn, integerValue) @end diff --git a/ios/ReactNativeCameraKit/CKTypes.swift b/ios/ReactNativeCameraKit/CKTypes.swift deleted file mode 100644 index dc2519e5f0..0000000000 --- a/ios/ReactNativeCameraKit/CKTypes.swift +++ /dev/null @@ -1,85 +0,0 @@ -// -// CKTypes.swift -// ReactNativeCameraKit -// - -import AVFoundation -import Foundation - -// Dummy class used for RCTConvert -@objc class CKTypes: NSObject {} - -@objc(CKCameraType) -public enum CameraType: Int { - case back - case front - - var avPosition: AVCaptureDevice.Position { - switch self { - case .back: return .back - case .front: return .front - } - } -} - -@objc(CKCameraFlashMode) -public enum FlashMode: Int { - case on - case off - case auto - - var avFlashMode: AVCaptureDevice.FlashMode { - switch self { - case .on: return .on - case .off: return .off - case .auto: return .auto - } - } -} - -@objc(CKCameraTorchMode) -public enum TorchMode: Int { - case on - case off - - init(from string: String) { - switch string { - case "on": self = .on - default: self = .off - } - } - - var avTorchMode: AVCaptureDevice.TorchMode { - switch self { - case .on: return .on - case .off: return .off - } - } -} - -@objc(CKCameraFocusMode) -public enum FocusMode: Int { - case on - case off -} - -@objc(CKCameraZoomMode) -public enum ZoomMode: Int { - case on - case off -} - -// Temporary method to fill gap with ObjC -@objc public class EnumHelper: NSObject { - @objc public static func cameraTypeToAVPosition(_ cameraType: CameraType) -> AVCaptureDevice.Position { - return cameraType.avPosition - } - - @objc public static func flashModeToAVFlashMode(_ flashMode: FlashMode) -> AVCaptureDevice.FlashMode { - return flashMode.avFlashMode - } - - @objc public static func torchModeToAVTorchMode(_ torchMode: TorchMode) -> AVCaptureDevice.TorchMode { - return torchMode.avTorchMode - } -} diff --git a/ios/ReactNativeCameraKit/CKCameraManager.swift b/ios/ReactNativeCameraKit/CameraManager.swift similarity index 68% rename from ios/ReactNativeCameraKit/CKCameraManager.swift rename to ios/ReactNativeCameraKit/CameraManager.swift index 914d124d96..8750016f07 100644 --- a/ios/ReactNativeCameraKit/CKCameraManager.swift +++ b/ios/ReactNativeCameraKit/CameraManager.swift @@ -1,20 +1,23 @@ // -// CKCameraManager.swift +// CameraManager.swift // ReactNativeCameraKit // import AVFoundation import Foundation -@objc(CKCameraManager) public class CKCameraManager: RCTViewManager { - var camera: CKCamera! +/* + * Class that manages the communication between React Native and the native implementation + */ +@objc(CKCameraManager) public class CameraManager: RCTViewManager { + var camera: CameraView! override public static func requiresMainQueueSetup() -> Bool { return true } override public func view() -> UIView! 
{
-        camera = CKCamera()
+        camera = CameraView()
 
         return camera
     }
@@ -22,14 +25,9 @@ import Foundation
     @objc func capture(_ options: NSDictionary,
                        resolve: @escaping RCTPromiseResolveBlock,
                        reject: @escaping RCTPromiseRejectBlock) {
-        camera.snapStillImage(options as! [String: Any],
-                              success: { resolve($0) },
-                              onError: { reject("capture_error", $0, nil) })
-    }
-
-    @objc func setTorchMode(_ modeString: String) {
-        let mode = TorchMode(from: modeString)
-        camera.setTorchMode(mode.avTorchMode)
+        camera.capture(options as! [String: Any],
+                       onSuccess: { resolve($0) },
+                       onError: { reject("capture_error", $0, nil) })
     }
 
     @objc func checkDeviceCameraAuthorizationStatus(_ resolve: @escaping RCTPromiseResolveBlock,
diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift
new file mode 100644
index 0000000000..16347d61ba
--- /dev/null
+++ b/ios/ReactNativeCameraKit/CameraProtocol.swift
@@ -0,0 +1,27 @@
+//
+//  CameraProtocol.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+
+protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate {
+    var previewView: UIView { get }
+
+    func setup()
+    func cameraRemovedFromSuperview()
+
+    func update(zoomVelocity: CGFloat)
+    func update(torchMode: TorchMode)
+    func update(flashMode: FlashMode)
+    func update(cameraType: CameraType)
+
+    func isBarcodeScannerEnabled(_ isEnabled: Bool,
+                                 supportedBarcodeType: [AVMetadataObject.ObjectType],
+                                 onReadCode: RCTDirectEventBlock?)
+    func update(scannerFrameSize: CGRect?)
+
+    func capturePicture(onWillCapture: @escaping () -> Void,
+                        onSuccess: @escaping (_ imageData: Data) -> (),
+                        onError: @escaping (_ message: String) -> ())
+}
diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift
new file mode 100644
index 0000000000..76544473c7
--- /dev/null
+++ b/ios/ReactNativeCameraKit/CameraView.swift
@@ -0,0 +1,321 @@
+//
+//  CameraView.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+import UIKit
+
+import os.signpost
+
+@available(iOS 12.0, *)
+let log = OSLog(
+    subsystem: "com.bertet.app",
+    category: .pointsOfInterest
+)
+
+/*
+ * View that abstracts the logic unrelated to the actual camera,
+ * like permissions, ratio overlay, focus, zoom gesture, writing images, etc.
+ */
+@objc(CKCameraView)
+class CameraView: UIView {
+    private let camera: CameraProtocol
+
+    // Focus
+    private let focusInterfaceView: FocusInterfaceView
+
+    // scanner
+    private var scannerInterfaceView: ScannerInterfaceView
+    private var supportedBarcodeType: [AVMetadataObject.ObjectType] = [.upce, .code39, .code39Mod43,
+                                                                       .ean13, .ean8, .code93,
+                                                                       .code128, .pdf417, .qr,
+                                                                       .aztec, .dataMatrix, .interleaved2of5]
+    // camera
+    private var ratioOverlayView: RatioOverlayView?
+
+    // gestures
+    private var zoomGestureRecognizer: UIPinchGestureRecognizer?
+
+    // props
+    // camera settings
+    @objc var cameraType: CameraType = .back
+    @objc var flashMode: FlashMode = .auto
+    @objc var torchMode: TorchMode = .off
+    // ratio overlay
+    @objc var ratioOverlay: String?
+    @objc var ratioOverlayColor: UIColor?
+    // scanner
+    @objc var scanBarcode = false
+    @objc var showFrame = false
+    @objc var onReadCode: RCTDirectEventBlock?
+    @objc var frameColor: UIColor?
+    @objc var laserColor: UIColor?
+    // other
+    @objc var onOrientationChange: RCTDirectEventBlock?
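+    // focus/zoom behavior (note: resetFocusTimeout is expressed in milliseconds;
+    // FocusInterfaceView divides it by 1000 before scheduling the reset timer)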
+ @objc var resetFocusTimeout = 0 + @objc var resetFocusWhenMotionDetected = false + @objc var focusMode: FocusMode = .on + @objc var zoomMode: ZoomMode = .on + + // MARK: Lifecycle + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override init(frame: CGRect) { + if #available(iOS 12.0, *) { + os_signpost(.begin, log: log, name: "init CameraView") + } + + print("--------- init CameraView") +#if targetEnvironment(simulator) + camera = SimulatorCamera() +#else + camera = RealCamera() +#endif + + scannerInterfaceView = ScannerInterfaceView(frameColor: .white, laserColor: .red) + focusInterfaceView = FocusInterfaceView() + + super.init(frame: frame) + + addSubview(camera.previewView) + + addSubview(scannerInterfaceView) + scannerInterfaceView.isHidden = true + + addSubview(focusInterfaceView) + focusInterfaceView.delegate = camera + + // Listen to orientation changes + UIDevice.current.beginGeneratingDeviceOrientationNotifications() + NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, + object: UIDevice.current, + queue: nil, + using: { [weak self] notification in self?.orientationChanged(notification: notification) }) + + handleCameraPermission() + if #available(iOS 12.0, *) { + os_signpost(.end, log: log, name: "init CameraView") + } + } + + override func removeFromSuperview() { + camera.cameraRemovedFromSuperview() + + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) + + super.removeFromSuperview() + } + + deinit { +// removeObservers() + UIDevice.current.endGeneratingDeviceOrientationNotifications() + } + + // MARK: React lifecycle + + override func reactSetFrame(_ frame: CGRect) { + super.reactSetFrame(frame) + + print("---------- CameraView reactSetFrame \(frame) \(bounds) \(Thread.current)") + + camera.previewView.frame = bounds + + scannerInterfaceView.frame = bounds + // If frame size changes, we have to update the scanner + camera.update(scannerFrameSize: showFrame ? scannerInterfaceView.frameSize : nil) + + focusInterfaceView.frame = bounds + + ratioOverlayView?.frame = bounds + } + + override func removeReactSubview(_ subview: UIView) { + print("---------- removeReactSubview \(subview)") + subview.removeFromSuperview() + super.removeReactSubview(subview) + } + + // Called once when all props have been set, then every time one is updated + override func didSetProps(_ changedProps: [String]) { + if #available(iOS 12.0, *) { + os_signpost(.begin, log: log, name: "didSetProps") + } + print("------ didSetProps \(changedProps) \(Thread.current)") + + // Camera settings + if changedProps.contains("cameraType") { + camera.update(cameraType: cameraType) + } + if changedProps.contains("flashMode") { + camera.update(flashMode: flashMode) + } + if changedProps.contains("cameraType") || changedProps.contains("torchMode") { + camera.update(torchMode: torchMode) + } + + // Ratio overlay + if changedProps.contains("ratioOverlay") { + if let ratioOverlay { + if let ratioOverlayView { + ratioOverlayView.setRatio(ratioOverlay) + } else { + ratioOverlayView = RatioOverlayView(frame: bounds, ratioString: ratioOverlay, overlayColor: ratioOverlayColor) + addSubview(ratioOverlayView!) 
+ } + } else { + ratioOverlayView?.removeFromSuperview() + ratioOverlayView = nil + } + } + + if changedProps.contains("ratioOverlayColor"), let ratioOverlayColor { + ratioOverlayView?.setColor(ratioOverlayColor) + } + + // Scanner + if changedProps.contains("scanBarcode") || changedProps.contains("onReadCode") { + camera.isBarcodeScannerEnabled(scanBarcode, + supportedBarcodeType: supportedBarcodeType, + onReadCode: onReadCode) + } + + if changedProps.contains("showFrame") || changedProps.contains("scanBarcode") { + DispatchQueue.main.async { + self.scannerInterfaceView.isHidden = !self.showFrame + + self.camera.update(scannerFrameSize: self.showFrame ? self.scannerInterfaceView.frameSize : nil) + } + } + + if changedProps.contains("laserColor"), let laserColor { + scannerInterfaceView.update(laserColor: laserColor) + } + + if changedProps.contains("frameColor"), let frameColor { + scannerInterfaceView.update(frameColor: frameColor) + } + + // Others + if changedProps.contains("focusMode") { + focusInterfaceView.update(focusMode: focusMode) + } + if changedProps.contains("resetFocusTimeout") { + focusInterfaceView.update(resetFocusTimeout: resetFocusTimeout) + } + if changedProps.contains("resetFocusWhenMotionDetected") { + focusInterfaceView.update(resetFocusWhenMotionDetected: resetFocusWhenMotionDetected) + } + + if changedProps.contains("zoomMode") { + if zoomMode == .on { + if (zoomGestureRecognizer == nil) { + let pinchGesture = UIPinchGestureRecognizer(target: self, action: #selector(handlePinchToZoomRecognizer(_:))) + addGestureRecognizer(pinchGesture) + zoomGestureRecognizer = pinchGesture + } + } else { + if let zoomGestureRecognizer { + removeGestureRecognizer(zoomGestureRecognizer) + self.zoomGestureRecognizer = nil + } + } + } + + if #available(iOS 12.0, *) { + os_signpost(.end, log: log, name: "didSetProps") + } + } + + // MARK: Public + + func capture(_ options: [String: Any], + onSuccess: @escaping (_ imageObject: [String: Any]) -> (), + onError: @escaping (_ error: String) -> ()) { + camera.capturePicture(onWillCapture: { [weak self] in + // Flash/dim preview to indicate shutter action + DispatchQueue.main.async { + self?.camera.previewView.alpha = 0 + UIView.animate(withDuration: 0.35, animations: { + self?.camera.previewView.alpha = 1 + }) + } + }, onSuccess: { [weak self] imageData in + DispatchQueue.global(qos: .default).async { + self?.writeCaptured(imageData: imageData, onSuccess: onSuccess, onError: onError) + + self?.focusInterfaceView.resetFocus() + } + }, onError: onError) + } + + // MARK: - Private Helper + + private func handleCameraPermission() { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + // The user has previously granted access to the camera. + camera.setup() + break + case .notDetermined: + // The user has not yet been presented with the option to grant video access. + AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in + if granted { + self?.camera.setup() + } + } + default: + // The user has previously denied access. 
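+            // (the default case also covers .denied and .restricted; the camera is
+            // simply never set up and no error event is emitted here)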
+            break
+        }
+    }
+
+    private func writeCaptured(imageData: Data,
+                               onSuccess: @escaping (_ imageObject: [String: Any]) -> (),
+                               onError: @escaping (_ error: String) -> ()) {
+        do {
+            let temporaryFileURL = try saveToTmpFolder(imageData)
+            onSuccess([
+                "size": imageData.count,
+                "uri": temporaryFileURL.description,
+                "name": temporaryFileURL.lastPathComponent
+            ])
+        } catch {
+            let errorMessage = "Error occurred while writing image data to a temporary file: \(error)"
+            print(errorMessage)
+            onError(errorMessage)
+        }
+    }
+
+    private func saveToTmpFolder(_ data: Data) throws -> URL {
+        let temporaryFileName = ProcessInfo.processInfo.globallyUniqueString
+        let temporaryFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent(temporaryFileName).appending(".jpg")
+        let temporaryFileURL = URL(fileURLWithPath: temporaryFilePath)
+
+        try data.write(to: temporaryFileURL, options: .atomic)
+
+        return temporaryFileURL
+    }
+
+    private func orientationChanged(notification: Notification) {
+        guard let onOrientationChange,
+              let device = notification.object as? UIDevice,
+              let orientation = Orientation(from: device.orientation) else {
+            return
+        }
+
+        onOrientationChange(["orientation": orientation.rawValue])
+    }
+
+    // MARK: - Gesture selectors
+
+    @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) {
+        if pinchRecognizer.state == .changed {
+            camera.update(zoomVelocity: pinchRecognizer.velocity)
+        }
+    }
+}
diff --git a/ios/ReactNativeCameraKit/FocusInterfaceView.swift b/ios/ReactNativeCameraKit/FocusInterfaceView.swift
new file mode 100644
index 0000000000..10856eae62
--- /dev/null
+++ b/ios/ReactNativeCameraKit/FocusInterfaceView.swift
@@ -0,0 +1,216 @@
+//
+//  FocusInterfaceView.swift
+//  ReactNativeCameraKit
+//
+
+import UIKit
+import AVFoundation
+
+enum FocusBehavior {
+    case customFocus(resetFocusWhenMotionDetected: Bool, resetFocus: () -> Void, focusFinished: () -> Void)
+    case continuousAutoFocus
+
+    var isSubjectAreaChangeMonitoringEnabled: Bool {
+        switch self {
+        case let .customFocus(resetFocusWhenMotionDetected, _, _):
+            return resetFocusWhenMotionDetected
+        case .continuousAutoFocus:
+            return false
+        }
+    }
+
+    var avFocusMode: AVCaptureDevice.FocusMode {
+        switch self {
+        case .customFocus:
+            return .autoFocus
+        case .continuousAutoFocus:
+            return .continuousAutoFocus
+        }
+    }
+
+    var exposureMode: AVCaptureDevice.ExposureMode {
+        switch self {
+        case .customFocus:
+            return .autoExpose
+        case .continuousAutoFocus:
+            return .continuousAutoExposure
+        }
+    }
+}
+
+protocol FocusInterfaceViewDelegate: AnyObject {
+    func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior)
+}
+
+/*
+ * Full screen focus interface
+ */
+class FocusInterfaceView: UIView {
+    weak var delegate: FocusInterfaceViewDelegate?
+
+    private var resetFocusTimeout = 0
+    private var resetFocusWhenMotionDetected = false
+
+    private let focusView: UIView = UIView(frame: .zero)
+    private var hideFocusViewTimer: Timer?
+    private var focusResetTimer: Timer?
+    private var startFocusResetTimerAfterFocusing: Bool = false
+    private var tapToFocusEngaged: Bool = false
+
+    private var focusGestureRecognizer: UITapGestureRecognizer?
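+    // Installed/removed on demand as the focusMode prop toggles (see update(focusMode:) below)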
+ + // MARK: - Lifecycle + + override init(frame: CGRect) { + super.init(frame: frame) + + focusView.backgroundColor = .clear + focusView.layer.borderColor = UIColor.yellow.cgColor + focusView.layer.borderWidth = 1 + focusView.isHidden = true + addSubview(focusView) + + isUserInteractionEnabled = true + } + + override func touchesBegan(_ touches: Set, with event: UIEvent?) { + super.touchesBegan(touches, with: event) + print(touches) + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + // MARK: - Public + + func update(focusMode: FocusMode) { + if focusMode == .on { + if (focusGestureRecognizer == nil) { + let tapGesture = UITapGestureRecognizer(target: self, action: #selector(focusAndExposeTap(_:))) + addGestureRecognizer(tapGesture) + focusGestureRecognizer = tapGesture + } + } else { + if let focusGestureRecognizer { + removeGestureRecognizer(focusGestureRecognizer) + self.focusGestureRecognizer = nil + } + } + } + + func update(resetFocusTimeout: Int) { + self.resetFocusTimeout = resetFocusTimeout + } + + func update(resetFocusWhenMotionDetected: Bool) { + self.resetFocusWhenMotionDetected = resetFocusWhenMotionDetected + } + + func focusFinished() { + if startFocusResetTimerAfterFocusing, resetFocusTimeout > 0 { + startFocusResetTimerAfterFocusing = false + + // Disengage manual focus after focusTimeout milliseconds + let focusTimeoutSeconds = TimeInterval(self.resetFocusTimeout) / 1000 + focusResetTimer = Timer.scheduledTimer(withTimeInterval: focusTimeoutSeconds, + repeats: false) { [weak self] _ in + self?.resetFocus() + } + } + } + + func resetFocus() { + if let focusResetTimer { + focusResetTimer.invalidate() + self.focusResetTimer = nil + } + + // Resetting focus to continuous focus, so not interested in resetting anymore + startFocusResetTimerAfterFocusing = false + + // Avoid showing reset-focus animation after each photo capture + if !tapToFocusEngaged { + return + } + tapToFocusEngaged = false + + DispatchQueue.main.async { + let layerCenter = self.center + + // Reset current camera focus + self.delegate?.focus(at: layerCenter, focusBehavior: .continuousAutoFocus) + + // Create animation to indicate the new focus location + let halfDiagonal: CGFloat = 123 + let halfDiagonalAnimation = halfDiagonal * 2 + + let focusViewFrame = CGRect(x: layerCenter.x - (halfDiagonal / 2), + y: layerCenter.y - (halfDiagonal / 2), + width: halfDiagonal, + height: halfDiagonal) + let focusViewFrameForAnimation = CGRect(x: layerCenter.x - (halfDiagonalAnimation / 2), + y: layerCenter.y - (halfDiagonalAnimation / 2), + width: halfDiagonalAnimation, + height: halfDiagonalAnimation) + + self.focusView.alpha = 0 + self.focusView.isHidden = false + self.focusView.frame = focusViewFrameForAnimation + + UIView.animate(withDuration: 0.2, animations: { + self.focusView.frame = focusViewFrame + self.focusView.alpha = 1 + }) { _ in + self.hideFocusViewTimer?.invalidate() + self.hideFocusViewTimer = Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { [weak self] _ in + guard let self else { return } + UIView.animate(withDuration: 0.2, animations: { + self.focusView.alpha = 0 + }) { _ in + self.focusView.isHidden = true + } + } + } + } + } + + // MARK: - Gesture selectors + + @objc func focusAndExposeTap(_ gestureRecognizer: UIGestureRecognizer) { + let touchPoint = gestureRecognizer.location(in: self) + delegate?.focus(at: touchPoint, + focusBehavior: .customFocus(resetFocusWhenMotionDetected: 
resetFocusWhenMotionDetected,
+                                                  resetFocus: resetFocus,
+                                                  focusFinished: focusFinished))
+
+        // Disengage manual focus once focusing finishes (if resetFocusTimeout > 0),
+        // see focusFinished()
+        focusResetTimer?.invalidate()
+        hideFocusViewTimer?.invalidate()
+        startFocusResetTimerAfterFocusing = true
+        tapToFocusEngaged = true
+
+        // Animate focus rectangle
+        let halfDiagonal: CGFloat = 73
+        let halfDiagonalAnimation = halfDiagonal * 2
+
+        let focusViewFrame = CGRect(x: touchPoint.x - (halfDiagonal / 2),
+                                    y: touchPoint.y - (halfDiagonal / 2),
+                                    width: halfDiagonal,
+                                    height: halfDiagonal)
+
+        focusView.alpha = 0
+        focusView.isHidden = false
+        focusView.frame = CGRect(x: touchPoint.x - (halfDiagonalAnimation / 2),
+                                 y: touchPoint.y - (halfDiagonalAnimation / 2),
+                                 width: halfDiagonalAnimation,
+                                 height: halfDiagonalAnimation)
+
+        UIView.animate(withDuration: 0.2, animations: {
+            self.focusView.frame = focusViewFrame
+            self.focusView.alpha = 1
+        })
+    }
+}
diff --git a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift
new file mode 100644
index 0000000000..fe1e62b81f
--- /dev/null
+++ b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift
@@ -0,0 +1,48 @@
+//
+//  PhotoCaptureDelegate.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+
+/*
+ * AVCapturePhotoOutput uses a delegation pattern; this class wraps it in a more convenient closure-based API.
+ */
+class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
+    private(set) var requestedPhotoSettings: AVCapturePhotoSettings
+
+    private let onWillCapture: () -> Void
+    private let onCaptureSuccess: (_ uniqueID: Int64, _ imageData: Data) -> Void
+    private let onCaptureError: (_ uniqueID: Int64, _ message: String) -> Void
+
+    init(with requestedPhotoSettings: AVCapturePhotoSettings,
+         onWillCapture: @escaping () -> Void,
+         onCaptureSuccess: @escaping (_ uniqueID: Int64, _ imageData: Data) -> Void,
+         onCaptureError: @escaping (_ uniqueID: Int64, _ errorMessage: String) -> Void) {
+        self.requestedPhotoSettings = requestedPhotoSettings
+        self.onWillCapture = onWillCapture
+        self.onCaptureSuccess = onCaptureSuccess
+        self.onCaptureError = onCaptureError
+    }
+
+    // MARK: - AVCapturePhotoCaptureDelegate
+
+    func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
+        onWillCapture()
+    }
+
+    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?)
{
+        if let error = error {
+            print("Could not capture still image: \(error)")
+            onCaptureError(requestedPhotoSettings.uniqueID, "Could not capture still image")
+            return
+        }
+
+        guard let imageData = photo.fileDataRepresentation() else {
+            onCaptureError(requestedPhotoSettings.uniqueID, "Could not capture still image")
+            return
+        }
+
+        onCaptureSuccess(requestedPhotoSettings.uniqueID, imageData)
+    }
+}
diff --git a/ios/ReactNativeCameraKit/RatioOverlayView.swift b/ios/ReactNativeCameraKit/RatioOverlayView.swift
new file mode 100644
index 0000000000..242a2a6749
--- /dev/null
+++ b/ios/ReactNativeCameraKit/RatioOverlayView.swift
@@ -0,0 +1,164 @@
+//
+//  RatioOverlayView.swift
+//  ReactNativeCameraKit
+//
+
+import UIKit
+
+struct RatioOverlayData: CustomStringConvertible {
+    let width: Float
+    let height: Float
+    let ratio: Float
+
+    init(from inputString: String) {
+        let values = inputString.split(separator: ":")
+
+        if values.count == 2,
+           let inputHeight = Float(values[0]),
+           let inputWidth = Float(values[1]),
+           inputHeight != 0,
+           inputWidth != 0 {
+            height = inputHeight
+            width = inputWidth
+            ratio = width / height
+        } else {
+            height = 0
+            width = 0
+            ratio = 0
+        }
+    }
+
+    // MARK: CustomStringConvertible
+
+    var description: String {
+        return "height:\(height) width:\(width) ratio:\(ratio)"
+    }
+}
+
+/*
+ * Full screen overlay that can appear on top of the camera as a hint for the expected ratio
+ */
+class RatioOverlayView: UIView {
+    private var ratioData: RatioOverlayData?
+
+    private let topView: UIView = UIView()
+    private let bottomView: UIView = UIView()
+
+    // MARK: - Lifecycle
+
+    init(frame: CGRect, ratioString: String, overlayColor: UIColor?) {
+        super.init(frame: frame)
+
+        isUserInteractionEnabled = false
+
+        let color = overlayColor ??
UIColor.black.withAlphaComponent(0.3) + setColor(color) + + addSubview(topView) + addSubview(bottomView) + + setRatio(ratioString) + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func layoutSubviews() { + super.layoutSubviews() + + setOverlayParts() + } + + // MARK: - Public + + func setRatio(_ ratioString: String) { + ratioData = RatioOverlayData(from: ratioString) + + UIView.animate(withDuration: 0.2) { + self.setOverlayParts() + } + } + + func setColor(_ color: UIColor) { + print("setColor \(color)") + topView.backgroundColor = color + bottomView.backgroundColor = color + } + + // MARK: - Private + + private func setOverlayParts() { + guard let ratioData, ratioData.ratio != 0 else { + isHidden = true + + return + } + + isHidden = false + + var centerSize = CGSize.zero + var sideSize = CGSize.zero + var centerFrame: CGRect + + if ratioData.width < ratioData.height { + centerSize.width = frame.size.width + centerSize.height = frame.size.height * CGFloat(ratioData.ratio) + + sideSize.width = centerSize.width + sideSize.height = (frame.size.height - centerSize.height) / 2.0 + + topView.frame = CGRect(x: 0, + y: 0, + width: sideSize.width, + height: sideSize.height) + centerFrame = CGRect(x: 0, + y: topView.frame.size.height + topView.frame.origin.y, + width: centerSize.width, + height: centerSize.height) + bottomView.frame = CGRect(x: 0, + y: centerFrame.size.height + centerFrame.origin.y, + width: sideSize.width, + height: sideSize.height) + } else if ratioData.width > ratioData.height { + centerSize.width = frame.size.width / CGFloat(ratioData.ratio) + centerSize.height = frame.size.height + + sideSize.width = (frame.size.width - centerSize.width) / 2.0 + sideSize.height = centerSize.height + + topView.frame = CGRect(x: 0, + y: 0, + width: sideSize.width, + height: sideSize.height) + centerFrame = CGRect(x: topView.frame.size.width + topView.frame.origin.x, + y: 0, + width: centerSize.width, + height: centerSize.height) + bottomView.frame = CGRect(x: centerFrame.size.width + centerFrame.origin.x, + y: 0, + width: sideSize.width, + height: sideSize.height) + } else { // ratio is 1:1 + centerSize.width = frame.size.width + centerSize.height = frame.size.width + + sideSize.width = centerSize.width + sideSize.height = (frame.size.height - centerSize.height) / 2.0 + + topView.frame = CGRect(x: 0, + y: 0, + width: sideSize.width, + height: sideSize.height) + centerFrame = CGRect(x: 0, + y: topView.frame.size.height + topView.frame.origin.y, + width: centerSize.width, + height: centerSize.height) + bottomView.frame = CGRect(x: 0, + y: centerFrame.size.height + centerFrame.origin.y, + width: sideSize.width, + height: sideSize.height) + } + } +} diff --git a/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h b/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h index 20b30561f2..a80a2d3fc1 100644 --- a/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h +++ b/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h @@ -7,9 +7,11 @@ #import #import #import +#import #else #import "RCTBridgeModule.h" #import "RCTViewManager.h" #import "RCTConvert.h" #import "RCTEventEmitter.h" +#import "UIView+React.h" #endif diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift new file mode 100644 index 0000000000..5629543f96 --- /dev/null +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -0,0 +1,493 @@ +// +// 
RealCamera.swift +// ReactNativeCameraKit +// + +import AVFoundation +import UIKit + +import os.signpost + +/* + * Real camera implementation that uses AVFoundation + */ +class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate, AVCapturePhotoCaptureDelegate { + var previewView: UIView { cameraPreview } + + private let cameraPreview = RealPreviewView(frame: .zero) + private let session = AVCaptureSession() + // Communicate with the session and other session objects on this queue. + private let sessionQueue = DispatchQueue(label: "session queue") + + // utilities + private var setupResult: SetupResult = .notStarted + private var isSessionRunning: Bool = false + private var backgroundRecordingId: UIBackgroundTaskIdentifier = .invalid + + private var videoDeviceInput: AVCaptureDeviceInput? + private let photoOutput = AVCapturePhotoOutput() + private let metadataOutput = AVCaptureMetadataOutput() + + private var cameraType: CameraType = .back + private var flashMode: FlashMode = .auto + private var torchMode: TorchMode = .off + private var resetFocus: (() -> Void)? + private var focusFinished: (() -> Void)? + private var onReadCode: RCTDirectEventBlock? + + // KVO observation + private var adjustingFocusObservation: NSKeyValueObservation? + + private var inProgressPhotoCaptureDelegates = [Int64: PhotoCaptureDelegate]() + + // MARK: - Lifecycle + + override init() { + // No-op + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func cameraRemovedFromSuperview() { + sessionQueue.async { + if #available(iOS 12.0, *) { + os_signpost(.begin, log: log, name: "stopRunning") + } + + if self.setupResult == .success { + print("------- stop running \(Thread.current)") + self.session.stopRunning() + self.removeObservers() + } + + if #available(iOS 12.0, *) { + os_signpost(.end, log: log, name: "stopRunning") + } + } + } + + deinit { + print("------- deinit RealCamera \(Thread.current)") + removeObservers() + } + + // MARK: - Public + + func setup() { + if #available(iOS 12.0, *) { + os_signpost(.begin, log: log, name: "setup") + } + + print("setup \(Thread.current)") + + session.sessionPreset = .photo + + cameraPreview.session = session + cameraPreview.previewLayer.videoGravity = .resizeAspectFill + + // Setup the capture session. + // In general, it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. + // Why not do all of this on the main queue? + // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue + // so that the main queue isn't blocked, which keeps the UI responsive. 
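+        // The same pattern is used for every other session mutation in this file
+        // (swapping inputs, toggling the metadata output, start/stop): hop onto
+        // sessionQueue first, then mutate, e.g. (schematic sketch only):
+        //     sessionQueue.async { self.session.beginConfiguration(); /* mutate */; self.session.commitConfiguration() }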
+        sessionQueue.async {
+            if #available(iOS 12.0, *) {
+                os_signpost(.event, log: log, name: "Processing", "setupCaptureSession")
+            }
+
+            self.setupResult = self.setupCaptureSession()
+
+            self.addObservers()
+
+            if self.setupResult == .success {
+                print("---- startRunning")
+                if #available(iOS 12.0, *) {
+                    os_signpost(.event, log: log, name: "Processing", "startRunning")
+                }
+                self.session.startRunning()
+                if #available(iOS 12.0, *) {
+                    os_signpost(.event, log: log, name: "Processing", "finished startRunning")
+                }
+            }
+
+            if #available(iOS 12.0, *) {
+                os_signpost(.end, log: log, name: "setup")
+            }
+        }
+    }
+
+    func update(zoomVelocity: CGFloat) {
+        guard !zoomVelocity.isNaN else { return }
+
+        sessionQueue.async {
+            let pinchVelocityDividerFactor: CGFloat = 20.0
+            self.videoDeviceInput?.device.incrementZoomFactor(atan(zoomVelocity / pinchVelocityDividerFactor))
+        }
+    }
+
+    func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior) {
+        DispatchQueue.main.async {
+            if #available(iOS 12.0, *) {
+                os_signpost(.begin, log: log, name: "focusat")
+            }
+
+            let devicePoint = self.cameraPreview.previewLayer.captureDevicePointConverted(fromLayerPoint: touchPoint)
+
+            self.sessionQueue.async {
+                if case let .customFocus(_, resetFocus, focusFinished) = focusBehavior {
+                    self.resetFocus = resetFocus
+                    self.focusFinished = focusFinished
+                } else {
+                    self.resetFocus = nil
+                    self.focusFinished = nil
+                }
+
+                self.videoDeviceInput?.device.focusWithMode(focusBehavior.avFocusMode,
+                                                            exposeWithMode: focusBehavior.exposureMode,
+                                                            atDevicePoint: devicePoint,
+                                                            isSubjectAreaChangeMonitoringEnabled: focusBehavior.isSubjectAreaChangeMonitoringEnabled)
+
+                if #available(iOS 12.0, *) {
+                    os_signpost(.end, log: log, name: "focusat")
+                }
+            }
+        }
+    }
+
+    func update(torchMode: TorchMode) {
+        self.torchMode = torchMode
+
+        sessionQueue.async {
+            if #available(iOS 12.0, *) {
+                os_signpost(.begin, log: log, name: "torchMode")
+            }
+
+            if (self.videoDeviceInput?.device.torchMode != torchMode.avTorchMode) {
+                print("update torchMode from \(self.videoDeviceInput?.device.torchMode.rawValue) to \(torchMode.avTorchMode.rawValue)")
+                self.videoDeviceInput?.device.setTorchMode(torchMode.avTorchMode)
+            }
+
+            if #available(iOS 12.0, *) {
+                os_signpost(.end, log: log, name: "torchMode")
+            }
+        }
+    }
+
+    func update(flashMode: FlashMode) {
+        self.flashMode = flashMode
+    }
+
+    func update(cameraType: CameraType) {
+        self.cameraType = cameraType
+
+        if #available(iOS 12.0, *) {
+            os_signpost(.event, log: log, name: "update cameraType")
+        }
+
+        sessionQueue.async {
+            if self.videoDeviceInput?.device.position == cameraType.avPosition {
+                return
+            }
+
+            if #available(iOS 12.0, *) {
+                os_signpost(.begin, log: log, name: "cameraType")
+            }
+
+            // Avoid changing device inputs when camera input is denied by the user, since both front and rear video input devices will be nil
+            guard self.setupResult == .success,
+                  let currentViewDeviceInput = self.videoDeviceInput,
+                  let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition),
+                  let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
+                return
+            }
+
+            self.removeObservers()
+            self.session.beginConfiguration()
+
+            // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
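+            // (the removeInput/addInput calls below sit inside the beginConfiguration/
+            // commitConfiguration pair above, so the input swap is applied atomically)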
+ self.session.removeInput(currentViewDeviceInput) + + if self.session.canAddInput(videoDeviceInput) { + self.session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + // If it fails, put back current camera + self.session.addInput(currentViewDeviceInput) + } + + self.session.commitConfiguration() + self.addObservers() + + if #available(iOS 12.0, *) { + os_signpost(.end, log: log, name: "cameraType") + } + } + } + + func capturePicture(onWillCapture: @escaping () -> Void, + onSuccess: @escaping (_ imageData: Data) -> Void, + onError: @escaping (_ message: String) -> Void) { + /* + Retrieve the video preview layer's video orientation on the main queue before + entering the session queue. Do this to ensure that UI elements are accessed on + the main thread and session configuration is done on the session queue. + */ + DispatchQueue.main.async { + let videoPreviewLayerOrientation = self.cameraPreview.previewLayer.connection?.videoOrientation + + self.sessionQueue.async { + if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { + photoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } + + let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]) + settings.isAutoStillImageStabilizationEnabled = true + + if self.videoDeviceInput?.device.isFlashAvailable == true { + settings.flashMode = self.flashMode.avFlashMode + } + + let photoCaptureDelegate = PhotoCaptureDelegate( + with: settings, + onWillCapture: onWillCapture, + onCaptureSuccess: { uniqueID, imageData in + self.inProgressPhotoCaptureDelegates[uniqueID] = nil + onSuccess(imageData) + }, + onCaptureError: { uniqueID, errorMessage in + self.inProgressPhotoCaptureDelegates[uniqueID] = nil + onError(errorMessage) + }) + + self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate + self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate) + } + } + } + + func isBarcodeScannerEnabled(_ isEnabled: Bool, + supportedBarcodeType: [AVMetadataObject.ObjectType], + onReadCode: RCTDirectEventBlock?) 
{
+        self.onReadCode = onReadCode
+
+        sessionQueue.async {
+            if #available(iOS 12.0, *) {
+                os_signpost(.begin, log: log, name: "isBarcodeScannerEnabled")
+            }
+
+            print("--------- isBarcodeScannerEnabled")
+
+            if isEnabled && onReadCode != nil {
+                let availableTypes = self.metadataOutput.availableMetadataObjectTypes
+                let filtered = supportedBarcodeType.filter { type in availableTypes.contains(type) }
+                self.metadataOutput.metadataObjectTypes = filtered
+            } else {
+                self.metadataOutput.metadataObjectTypes = []
+            }
+
+            // if isEnabled && self.metadataOutput == nil {
+            //     self.session.beginConfiguration()
+            //
+            //     let metadataOutput = AVCaptureMetadataOutput()
+            //     if self.session.canAddOutput(metadataOutput) {
+            //         self.session.addOutput(metadataOutput)
+            //         metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
+            //
+            //         let availableTypes = metadataOutput.availableMetadataObjectTypes
+            //         metadataOutput.metadataObjectTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) }
+            //
+            //         self.metadataOutput = metadataOutput
+            //     }
+            //
+            //     self.session.commitConfiguration()
+            // } else if !isEnabled, let metadataOutput {
+            //     self.session.beginConfiguration()
+            //     self.session.removeOutput(metadataOutput)
+            //     self.session.commitConfiguration()
+            //
+            //     self.metadataOutput = nil
+            // }
+
+            if #available(iOS 12.0, *) {
+                os_signpost(.end, log: log, name: "isBarcodeScannerEnabled")
+            }
+        }
+    }
+
+    func update(scannerFrameSize: CGRect?) {
+        self.sessionQueue.async {
+            if #available(iOS 12.0, *) {
+                os_signpost(.begin, log: log, name: "scannerFrameSize")
+            }
+
+            if !self.session.isRunning {
+                print("setting rectOfInterest while session not running wouldn't work")
+                return
+            }
+
+            DispatchQueue.main.async {
+                let visibleRect = scannerFrameSize != nil && scannerFrameSize != .zero ? self.cameraPreview.previewLayer.metadataOutputRectConverted(fromLayerRect: scannerFrameSize!) : nil
+
+                print("------ update scannerFrameSize \(visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1))")
+                self.sessionQueue.async {
+                    self.metadataOutput.rectOfInterest = visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1)
+
+                    if #available(iOS 12.0, *) {
+                        os_signpost(.end, log: log, name: "scannerFrameSize")
+                    }
+                }
+            }
+        }
+    }
+
+    // MARK: - AVCaptureMetadataOutputObjectsDelegate
+
+    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
+        // Try to retrieve the barcode from the metadata extracted
+        guard let machineReadableCodeObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
+              let codeStringValue = machineReadableCodeObject.stringValue else {
+            return
+        }
+
+        print("----------- \(codeStringValue)")
+
+        onReadCode?(["codeStringValue": codeStringValue])
+        // check code is different? should pause few seconds instead and allow user to scan a second time
+    }
+
+    // MARK: - Private
+
+    private func setupCaptureSession() -> SetupResult {
+        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition),
+              let videoDeviceInput = try?
AVCaptureDeviceInput(device: videoDevice) else { + return .sessionConfigurationFailed + } + + session.beginConfiguration() + + if session.canAddInput(videoDeviceInput) { + session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + return .sessionConfigurationFailed + } + + if session.canAddOutput(photoOutput) { + session.addOutput(photoOutput) + } else { + return .sessionConfigurationFailed + } + + if self.session.canAddOutput(metadataOutput) { + self.session.addOutput(metadataOutput) + metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main) + + metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes + } + + session.commitConfiguration() + + self.refreshPreviewVideoOrientation() + + return .success + } + + private func refreshPreviewVideoOrientation() { + DispatchQueue.main.async { + guard let orientation = Orientation(from: UIApplication.shared.statusBarOrientation)?.avVideoOrientation else { return } + + self.cameraPreview.previewLayer.connection?.videoOrientation = orientation + } + } + + // MARK: Private observers + + private func addObservers() { + guard adjustingFocusObservation == nil else { return } + + NotificationCenter.default.addObserver(forName: UIApplication.didChangeStatusBarOrientationNotification, + object: nil, + queue: nil, + using: { [weak self] _ in self?.refreshPreviewVideoOrientation() }) + + adjustingFocusObservation = videoDeviceInput?.device.observe(\.isAdjustingFocus, + options: .new, + changeHandler: { [weak self] device, change in + guard let self, let isFocusing = change.newValue else { return } + + self.isAdjustingFocus(isFocusing: isFocusing) + }) + + NotificationCenter.default.addObserver(forName: .AVCaptureDeviceSubjectAreaDidChange, + object: videoDeviceInput?.device, + queue: nil, + using: { [weak self] notification in self?.subjectAreaDidChange(notification: notification) }) + NotificationCenter.default.addObserver(forName: .AVCaptureSessionRuntimeError, + object: session, + queue: nil, + using: { [weak self] notification in self?.sessionRuntimeError(notification: notification) }) + NotificationCenter.default.addObserver(forName: .AVCaptureSessionWasInterrupted, + object: session, + queue: nil, + using: { [weak self] notification in self?.sessionWasInterrupted(notification: notification) }) + + } + + private func removeObservers() { + NotificationCenter.default.removeObserver(self) + + adjustingFocusObservation?.invalidate() + adjustingFocusObservation = nil + } + + private func isAdjustingFocus(isFocusing: Bool) { + if !isFocusing { + focusFinished?() + } + } + + private func subjectAreaDidChange(notification: Notification) { + resetFocus?() + } + + private func sessionRuntimeError(notification: Notification) { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { return } + + print("Capture session runtime error: \(error)") + + // Automatically try to restart the session running if media services were reset and the last start running succeeded. + if error.code == .mediaServicesWereReset { + sessionQueue.async { + if self.isSessionRunning { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + } + } + } + // Otherwise, enable the user to try to resume the session running. + // FIXME: Missing showResumeButton + } + + private func sessionWasInterrupted(notification: Notification) { + // In some scenarios we want to enable the user to resume the session running. 
+        // For example, if music playback is initiated via control center while using AVCam,
+        // then the user can let AVCam resume the session running, which will stop music playback.
+        // Note that stopping music playback in control center will not automatically resume the session running.
+        // Also note that it is not always possible to resume, see -[resumeInterruptedSession:].
+        var showResumeButton = false
+
+        if let reasonValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
+           let reason = AVCaptureSession.InterruptionReason(rawValue: reasonValue) {
+            print("Capture session was interrupted with reason \(reason)")
+
+            if reason == .audioDeviceInUseByAnotherClient || reason == .videoDeviceInUseByAnotherClient {
+                showResumeButton = true
+            }
+        }
+
+        // FIXME: Missing use of showResumeButton
+    }
+}
diff --git a/ios/ReactNativeCameraKit/RealPreviewView.swift b/ios/ReactNativeCameraKit/RealPreviewView.swift
new file mode 100644
index 0000000000..56c9e96c9f
--- /dev/null
+++ b/ios/ReactNativeCameraKit/RealPreviewView.swift
@@ -0,0 +1,25 @@
+//
+//  RealPreviewView.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+
+class RealPreviewView: UIView {
+    // Use AVCaptureVideoPreviewLayer as the view's backing layer.
+    override class var layerClass: AnyClass {
+        AVCaptureVideoPreviewLayer.self
+    }
+
+    // Create an accessor for the right layer type
+    var previewLayer: AVCaptureVideoPreviewLayer {
+        // We can safely force-cast here, it can't change at runtime
+        return layer as! AVCaptureVideoPreviewLayer
+    }
+
+    // Connect the layer to a capture session.
+    var session: AVCaptureSession? {
+        get { previewLayer.session }
+        set { previewLayer.session = newValue }
+    }
+}
diff --git a/ios/ReactNativeCameraKit/ScannerFrameView.swift b/ios/ReactNativeCameraKit/ScannerFrameView.swift
new file mode 100644
index 0000000000..381da4f8a7
--- /dev/null
+++ b/ios/ReactNativeCameraKit/ScannerFrameView.swift
@@ -0,0 +1,99 @@
+//
+//  ScannerFrameView.swift
+//  ReactNativeCameraKit
+//
+
+import UIKit
+
+/*
+ * Frame for the barcode scanner
+ */
+class ScannerFrameView: UIView {
+    private let laserView = UIView()
+    private let frameViews: [UIView] = (0..<8).map { _ in UIView() }
+
+    // MARK: - Lifecycle
+
+    init(frameColor: UIColor, laserColor: UIColor) {
+        super.init(frame: .zero)
+
+        laserView.backgroundColor = laserColor
+        addSubview(laserView)
+
+        frameViews.forEach {
+            $0.backgroundColor = frameColor
+            addSubview($0)
+        }
+    }
+
+    @available(*, unavailable)
+    required init?(coder aDecoder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+
+    override func draw(_ rect: CGRect) {
+        super.draw(rect)
+
+        frameViews.enumerated().forEach { (index, view) in
+            view.frame = sizeForFramePart(at: index)
+        }
+
+        startAnimatingScanner()
+    }
+
+    // MARK: - Public
+
+    func startAnimatingScanner() {
+        if laserView.frame.origin.y != 0 {
+            laserView.frame = CGRect(x: 2, y: 2, width: frame.size.width - 4, height: 2)
+        }
+
+        UIView.animate(withDuration: 3, delay: 0, options: [.autoreverse, .repeat], animations: {
+            self.laserView.center = CGPoint(x: self.frame.size.width / 2, y: self.frame.size.height - 3)
+        })
+    }
+
+    func stopAnimatingScanner() {
+        laserView.removeFromSuperview()
+    }
+
+    func update(frameColor: UIColor) {
+        frameViews.forEach { $0.backgroundColor = frameColor }
+    }
+
+    func update(laserColor: UIColor) {
+        laserView.backgroundColor = laserColor
+    }
+
+    // MARK: - Private
+
+    private func sizeForFramePart(at index: Int) -> CGRect {
+        let cornerHeight: CGFloat = 20.0
+        let
cornerWidth: CGFloat = 2.0 + + switch index { + case 0: + return .init(x: 0, y: 0, width: cornerWidth, height: cornerHeight) + case 1: + return .init(x: 0, y: 0, width: cornerHeight, height: cornerWidth) + case 2: + return .init(x: bounds.width - cornerHeight, y: 0, width: cornerHeight, height: cornerWidth) + case 3: + return .init(x: bounds.width - cornerWidth, y: 0, width: cornerWidth, height: cornerHeight) + case 4: + return .init(x: bounds.width - cornerWidth, + y: bounds.height - cornerHeight, + width: cornerWidth, + height: cornerHeight) + case 5: + return .init(x: bounds.width - cornerHeight, y: bounds.height - cornerWidth, width: cornerHeight, height: cornerWidth) + case 6: + return .init(x: 0, y: bounds.height - cornerWidth, width: cornerHeight, height: cornerWidth) + case 7: + return .init(x: 0, y: bounds.height - cornerHeight, width: cornerWidth, height: cornerHeight) + default: + fatalError("unknown index") + } + } +} diff --git a/ios/ReactNativeCameraKit/ScannerInterfaceView.swift b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift new file mode 100644 index 0000000000..38d43e92a7 --- /dev/null +++ b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift @@ -0,0 +1,88 @@ +// +// ScannerInterfaceView.swift +// ReactNativeCameraKit +// + +import UIKit + +/* + * Full screen scanner interface + */ +class ScannerInterfaceView: UIView { + private let frameView: ScannerFrameView + private let topOverlayView = UIView() + private let bottomOverlayView = UIView() + private let leftOverlayView = UIView() + private let rightOverlayView = UIView() + + // MARK: - Constants + + private let frameOffset: CGFloat = 30 + private let frameHeight: CGFloat = 200 + private let overlayColor: UIColor = .black.withAlphaComponent(0.4) + + // MARK: - Lifecycle + + init(frameColor: UIColor, laserColor: UIColor) { + frameView = ScannerFrameView(frameColor: frameColor, laserColor: laserColor) + + super.init(frame: .zero) + + addSubview(frameView) + + frameView.startAnimatingScanner() + + [topOverlayView, bottomOverlayView, leftOverlayView, rightOverlayView].forEach { + $0.backgroundColor = overlayColor + addSubview($0) + } + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func draw(_ rect: CGRect) { + super.draw(rect) + + frameView.frame = CGRect(x: 0, y: 0, width: bounds.size.width - 2 * frameOffset, height: frameHeight) + frameView.center = center + + print("--- draw rect scannerInterface \(frameView.frame)") + + updateOverlaySize(frameView.frame) + } + + // MARK: - Public + + var frameSize: CGRect { + print("--- frameView.frame \(frameView.frame)") + return frameView.frame + } + + func startAnimatingScanner() { + frameView.startAnimatingScanner() + } + + func stopAnimatingScanner() { + frameView.stopAnimatingScanner() + } + + func update(frameColor: UIColor) { + frameView.update(frameColor: frameColor) + } + + func update(laserColor: UIColor) { + frameView.update(laserColor: laserColor) + } + + // MARK: - Private + + private func updateOverlaySize(_ frameRect: CGRect) { + topOverlayView.frame = CGRect(x: 0, y: 0, width: frame.size.width, height: frameRect.origin.y) + leftOverlayView.frame = CGRect(x: 0, y: frameRect.origin.y, width: frameOffset, height: frameHeight) + rightOverlayView.frame = CGRect(x: frameRect.size.width + frameOffset, y: frameRect.origin.y, width: frameOffset, height: frameHeight) + bottomOverlayView.frame = CGRect(x: 0, y: frameRect.origin.y + frameHeight, width: frame.size.width, 
height: frame.size.height - frameRect.origin.y - frameHeight) + } +} diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift new file mode 100644 index 0000000000..197a2c8a6e --- /dev/null +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -0,0 +1,93 @@ +// +// SimulatorCamera.swift +// ReactNativeCameraKit +// + +import AVFoundation +import UIKit + +import os.signpost + +/* + * Fake camera implementation to be used on simulator + */ +class SimulatorCamera: CameraProtocol { + var previewView: UIView { mockPreview } + + var onReadCode: RCTDirectEventBlock? + + private var fakeFocusFinishedTimer: Timer? + + // Create mock camera layer. When a photo is taken, we capture this layer and save it in place of a hardware input. + private let mockPreview = SimulatorPreviewView(frame: .zero) + + // MARK: - Public + + func setup() {} + func cameraRemovedFromSuperview() {} + + func update(zoomVelocity: CGFloat) { + DispatchQueue.main.async { + self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(zoomVelocity)" + } + } + + func focus(at: CGPoint, focusBehavior: FocusBehavior) { + DispatchQueue.main.async { + self.mockPreview.focusAtLabel.text = "Focus at: (\(Int(at.x)), \(Int(at.y))), focusMode: \(focusBehavior.avFocusMode)" + } + + // Fake focus finish after a second + fakeFocusFinishedTimer?.invalidate() + if case let .customFocus(_, _, focusFinished) = focusBehavior { + fakeFocusFinishedTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: false) { _ in + focusFinished() + } + } + } + + func update(torchMode: TorchMode) { + DispatchQueue.main.async { + self.mockPreview.torchModeLabel.text = "Torch mode: \(torchMode)" + } + } + + func update(flashMode: FlashMode) { + DispatchQueue.main.async { + self.mockPreview.flashModeLabel.text = "Flash mode: \(flashMode)" + } + } + + func update(cameraType: CameraType) { + DispatchQueue.main.async { + self.mockPreview.cameraTypeLabel.text = "Camera type: \(cameraType)" + + self.mockPreview.randomize() + } + } + + func isBarcodeScannerEnabled(_ isEnabled: Bool, + supportedBarcodeType: [AVMetadataObject.ObjectType], + onReadCode: RCTDirectEventBlock?) {} + func update(scannerFrameSize: CGRect?) 
{} + + func capturePicture(onWillCapture: @escaping () -> Void, + onSuccess: @escaping (_ imageData: Data) -> (), + onError: @escaping (_ message: String) -> ()) { + onWillCapture() + + DispatchQueue.main.async { + // Generate snapshot from main UI thread + let previewSnapshot = self.mockPreview.snapshot(withTimestamp: true) + + // Then switch to background thread + DispatchQueue.global(qos: .default).async { + if let imageData = previewSnapshot?.jpegData(compressionQuality: 0.85) { + onSuccess(imageData) + } else { + onError("Failed to convert snapshot to JPEG data") + } + } + } + } +} diff --git a/ios/ReactNativeCameraKit/CKMockPreview.swift b/ios/ReactNativeCameraKit/SimulatorPreviewView.swift similarity index 74% rename from ios/ReactNativeCameraKit/CKMockPreview.swift rename to ios/ReactNativeCameraKit/SimulatorPreviewView.swift index 627d955a77..bc33d24ad7 100644 --- a/ios/ReactNativeCameraKit/CKMockPreview.swift +++ b/ios/ReactNativeCameraKit/SimulatorPreviewView.swift @@ -1,20 +1,39 @@ // -// CKMockPreview.swift +// SimulatorPreviewView.swift // ReactNativeCameraKit // import UIKit -@objc(CKMockPreview) -public class CKMockPreview: UIView { - // MARK: - Public +class SimulatorPreviewView: UIView { + let zoomVelocityLabel = UILabel() + let focusAtLabel = UILabel() + let torchModeLabel = UILabel() + let flashModeLabel = UILabel() + let cameraTypeLabel = UILabel() + + var balloonLayer = CALayer() + + // MARK: - Lifecycle - @objc - public override init(frame: CGRect) { + override init(frame: CGRect) { super.init(frame: frame) - layer.cornerRadius = 10.0 layer.masksToBounds = true + + layer.insertSublayer(balloonLayer, at: 0) + + let stackView = UIStackView() + stackView.axis = .vertical + addSubview(stackView) + + stackView.translatesAutoresizingMaskIntoConstraints = false + stackView.topAnchor.constraint(equalTo: safeAreaLayoutGuide.topAnchor).isActive = true + stackView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 10).isActive = true + [zoomVelocityLabel, focusAtLabel, torchModeLabel, flashModeLabel, cameraTypeLabel].forEach { + $0.numberOfLines = 0 + stackView.addArrangedSubview($0) + } } @available(*, unavailable) @@ -22,14 +41,15 @@ public class CKMockPreview: UIView { fatalError("init(coder:) has not been implemented") } - override public func layoutSubviews() { - super.layoutSubviews() + override func draw(_ rect: CGRect) { + super.draw(rect) randomize() } - @objc - public func snapshotWithTimestamp(_ showTimestamp: Bool) -> UIImage? { + // MARK: - Public + + func snapshot(withTimestamp showTimestamp: Bool) -> UIImage? { UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0) drawHierarchy(in: bounds, afterScreenUpdates: false) var image = UIGraphicsGetImageFromCurrentImageContext() @@ -42,7 +62,7 @@ public class CKMockPreview: UIView { let font = UIFont.boldSystemFont(ofSize: 20) image?.draw(in: CGRect(x: 0, y: 0, width: image?.size.width ?? 0, height: image?.size.height ?? 0)) - let rect = CGRect(x: 25, y: 25, width: image?.size.width ?? 0, height: image?.size.height ?? 0) + let rect = CGRect(x: 25, y: 125, width: image?.size.width ?? 0, height: image?.size.height ?? 
0) UIColor.white.set() let textFontAttributes = [NSAttributedString.Key.font: font] stringFromDate.draw(in: rect.integral, withAttributes: textFontAttributes) @@ -54,10 +74,12 @@ public class CKMockPreview: UIView { return image } - @objc - public func randomize() { + func randomize() { + print("randomize \(Thread.current)") layer.backgroundColor = UIColor(hue: CGFloat(Double.random(in: 0...1)), saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor - layer.sublayers = nil + balloonLayer.removeFromSuperlayer() + balloonLayer = CALayer() + layer.insertSublayer(balloonLayer, at: 0) for _ in 0..<5 { drawBalloon() @@ -107,7 +129,7 @@ public class CKMockPreview: UIView { circle.addSublayer(reflection) balloon.addSublayer(circle) - layer.addSublayer(balloon) + balloonLayer.addSublayer(balloon) // Apply animation let scale = CABasicAnimation(keyPath: "transform.scale") diff --git a/ios/ReactNativeCameraKit/Types.swift b/ios/ReactNativeCameraKit/Types.swift new file mode 100644 index 0000000000..68f3f658d5 --- /dev/null +++ b/ios/ReactNativeCameraKit/Types.swift @@ -0,0 +1,161 @@ +// +// Types.swift +// ReactNativeCameraKit +// + +import AVFoundation +import Foundation + +// Dummy class used for RCTConvert +@objc(CKType) class Types: NSObject {} + +@objc(CKCameraType) +public enum CameraType: Int, CustomStringConvertible { + case back + case front + + var avPosition: AVCaptureDevice.Position { + switch self { + case .back: return .back + case .front: return .front + } + } + + public var description: String { + switch self { + case .back: return "back" + case .front: return "front" + } + } +} + +@objc(CKFlashMode) +public enum FlashMode: Int, CustomStringConvertible { + case on + case off + case auto + + var avFlashMode: AVCaptureDevice.FlashMode { + switch self { + case .on: return .on + case .off: return .off + case .auto: return .auto + } + } + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + case .auto: return "auto" + } + } +} + +@objc(CKTorchMode) +public enum TorchMode: Int, CustomStringConvertible { + case on + case off + + init(from string: String) { + switch string { + case "on": self = .on + default: self = .off + } + } + + var avTorchMode: AVCaptureDevice.TorchMode { + switch self { + case .on: return .on + case .off: return .off + } + } + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + } + } +} + +@objc(CKFocusMode) +public enum FocusMode: Int, CustomStringConvertible { + case on + case off + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + } + } +} + +@objc(CKZoomMode) +public enum ZoomMode: Int, CustomStringConvertible { + case on + case off + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + } + } +} + +@objc(CKSetupResult) +enum SetupResult: Int { + case notStarted + case success + case cameraNotAuthorized + case sessionConfigurationFailed +} + +enum Orientation: Int { + case portrait = 0 // ⬆️ + case landscapeLeft = 1 // ⬅️ + case portraitUpsideDown = 2 // ⬇️ + case landscapeRight = 3 // ➡️ + + init?(from orientation: UIDeviceOrientation) { + switch orientation { + case .portrait: self = .portrait + case .landscapeLeft: self = .landscapeLeft + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeRight: self = .landscapeRight + default: return nil + } + } + + init?(from orientation: UIInterfaceOrientation) { + switch orientation { + case .portrait: self 
= .portrait + case .landscapeLeft: self = .landscapeLeft + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeRight: self = .landscapeRight + default: return nil + } + } + + var avVideoOrientation: AVCaptureVideoOrientation { + switch self { + case .portrait: return .portrait + case .landscapeLeft: return .landscapeLeft + case .portraitUpsideDown: return .portraitUpsideDown + case .landscapeRight: return .landscapeRight + } + } +} + +extension AVCaptureDevice.FocusMode: CustomStringConvertible { + public var description: String { + switch self { + case .autoFocus: return "autofocus" + case .continuousAutoFocus: return "continuousAutoFocus" + case .locked: return "locked" + @unknown default: return "unknown" + } + } +} From baf3e11feb08ef808e9910df0b3caa6d574a641f Mon Sep 17 00:00:00 2001 From: David Bertet Date: Tue, 27 Jun 2023 21:53:36 -0700 Subject: [PATCH 03/20] Fix Git Action iOS build --- .github/workflows/build.yml | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7cac1a5313..432ee6f8b7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,18 +12,18 @@ jobs: - name: Bootstrap run: yarn bootstrap - name: Build - run: cd example/ios && xcodebuild -workspace CameraKitExample.xcworkspace -configuration Debug -scheme CameraKitExample -arch x86_64 + run: cd example/ios && xcodebuild -workspace CameraKitExample.xcworkspace -configuration Debug -scheme CameraKitExample -sdk iphoneos build CODE_SIGN_IDENTITY="" CODE_SIGNING_REQUIRED=NO build-example-android: - name: build-example-android - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Validate Gradle Wrapper - uses: gradle/wrapper-validation-action@v1 - - name: Install modules - run: yarn - - name: Bootstrap - run: yarn bootstrap - - name: Build - run: cd example/android && ./gradlew assembleDebug + name: build-example-android + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Validate Gradle Wrapper + uses: gradle/wrapper-validation-action@v1 + - name: Install modules + run: yarn + - name: Bootstrap + run: yarn bootstrap + - name: Build + run: cd example/android && ./gradlew assembleDebug From d730a760510c2ea33a5f6326117455238d0857db Mon Sep 17 00:00:00 2001 From: David Bertet Date: Fri, 30 Jun 2023 00:24:37 -0700 Subject: [PATCH 04/20] Fix small issues, throttle scanner --- README.md | 1 + ios/ReactNativeCameraKit/CKCameraManager.m | 8 +- ios/ReactNativeCameraKit/CameraManager.swift | 2 +- ios/ReactNativeCameraKit/CameraProtocol.swift | 4 +- ios/ReactNativeCameraKit/CameraView.swift | 48 +++++++- .../FocusInterfaceView.swift | 5 - ios/ReactNativeCameraKit/RealCamera.swift | 104 +++++++++++------- .../SimulatorCamera.swift | 10 +- src/Camera.ios.tsx | 8 +- src/CameraScreen.tsx | 6 +- 10 files changed, 125 insertions(+), 71 deletions(-) diff --git a/README.md b/README.md index e45fab7770..9e1980fdd9 100644 --- a/README.md +++ b/README.md @@ -181,6 +181,7 @@ Additionally, the Camera / CameraScreen can be used for barcode scanning | `ratioOverlayColor` | Color | Any color with alpha. Default: `'#ffffff77'` | | `resetFocusTimeout` | Number | Dismiss tap to focus after this many milliseconds. Default `0` (disabled). Example: `5000` is 5 seconds. | | `resetFocusWhenMotionDetected` | Boolean | Dismiss tap to focus when focus area content changes. 
Native iOS feature, see documentation: https://developer.apple.com/documentation/avfoundation/avcapturedevice/1624644-subjectareachangemonitoringenabl?language=objc). Default `true`. | +| `scanThrottleDelay` | Number | Duration between scan detection in milliseconds. Default 2000 (2s) | | **Barcode only** | | `scanBarcode` | Boolean | Enable barcode scanner. Default: `false` | | `showFrame` | Boolean | Show frame in barcode scanner. Default: `false` | diff --git a/ios/ReactNativeCameraKit/CKCameraManager.m b/ios/ReactNativeCameraKit/CKCameraManager.m index 499902995a..0de8e2bdc3 100644 --- a/ios/ReactNativeCameraKit/CKCameraManager.m +++ b/ios/ReactNativeCameraKit/CKCameraManager.m @@ -18,19 +18,21 @@ @interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(cameraType, CKCameraType) RCT_EXPORT_VIEW_PROPERTY(flashMode, CKFlashMode) RCT_EXPORT_VIEW_PROPERTY(torchMode, CKTorchMode) -RCT_EXPORT_VIEW_PROPERTY(focusMode, CKFocusMode) -RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKZoomMode) RCT_EXPORT_VIEW_PROPERTY(ratioOverlay, NSString) RCT_EXPORT_VIEW_PROPERTY(ratioOverlayColor, UIColor) RCT_EXPORT_VIEW_PROPERTY(scanBarcode, BOOL) RCT_EXPORT_VIEW_PROPERTY(onReadCode, RCTDirectEventBlock) -RCT_EXPORT_VIEW_PROPERTY(onOrientationChange, RCTDirectEventBlock) RCT_EXPORT_VIEW_PROPERTY(showFrame, BOOL) +RCT_EXPORT_VIEW_PROPERTY(scanThrottleDelay, NSInteger) RCT_EXPORT_VIEW_PROPERTY(laserColor, UIColor) RCT_EXPORT_VIEW_PROPERTY(frameColor, UIColor) + +RCT_EXPORT_VIEW_PROPERTY(onOrientationChange, RCTDirectEventBlock) RCT_EXPORT_VIEW_PROPERTY(resetFocusTimeout, NSInteger) RCT_EXPORT_VIEW_PROPERTY(resetFocusWhenMotionDetected, BOOL) +RCT_EXPORT_VIEW_PROPERTY(focusMode, CKFocusMode) +RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKZoomMode) RCT_EXTERN_METHOD(capture:(NSDictionary*)options resolve:(RCTPromiseResolveBlock)resolve diff --git a/ios/ReactNativeCameraKit/CameraManager.swift b/ios/ReactNativeCameraKit/CameraManager.swift index 8750016f07..51c13c3f3f 100644 --- a/ios/ReactNativeCameraKit/CameraManager.swift +++ b/ios/ReactNativeCameraKit/CameraManager.swift @@ -7,7 +7,7 @@ import AVFoundation import Foundation /* - * Class that manages the communication between React Native and the native implementation + * Class managing the communication between React Native and the native implementation */ @objc(CKCameraManager) public class CameraManager: RCTViewManager { var camera: CameraView! diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index 16347d61ba..8a199a09a1 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -8,7 +8,7 @@ import AVFoundation protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { var previewView: UIView { get } - func setup() + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) func cameraRemovedFromSuperview() func update(zoomVelocity: CGFloat) @@ -18,7 +18,7 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func isBarcodeScannerEnabled(_ isEnabled: Bool, supportedBarcodeType: [AVMetadataObject.ObjectType], - onReadCode: RCTDirectEventBlock?) + onBarcodeRead: ((_ barcode: String) -> Void)?) func update(scannerFrameSize: CGRect?) 
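For context, this protocol is what keeps the AVFoundation-backed camera and the simulator mock interchangeable: later in this series, `CameraView` picks a conformer at compile time with `#if targetEnvironment(simulator)`. A condensed sketch of that selection, where `makeCamera` and the barcode types are illustrative only:

```swift
import AVFoundation

// Condensed from the CameraView initializer introduced in this series.
// `makeCamera` and the barcode types passed to setup(...) are examples.
func makeCamera() -> CameraProtocol {
    #if targetEnvironment(simulator)
    let camera: CameraProtocol = SimulatorCamera()
    #else
    let camera: CameraProtocol = RealCamera()
    #endif
    camera.setup(cameraType: .back, supportedBarcodeType: [.qr, .ean13])
    return camera
}
```

Keeping the selection behind the protocol means the rest of `CameraView` never branches on the environment again.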
func capturePicture(onWillCapture: @escaping () -> Void, diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 76544473c7..b2f80367df 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -15,7 +15,7 @@ let log = OSLog( ) /* - * View that abtracts the logic unrelated to the actual camera + * View abtracting the logic unrelated to the actual camera * Like permission, ratio overlay, focus, zoom gesture, write image, etc */ @objc(CKCameraView) @@ -26,6 +26,7 @@ class CameraView: UIView { private let focusInterfaceView: FocusInterfaceView // scanner + private var lastBarcodeDetectedTime: TimeInterval = 0 private var scannerInterfaceView: ScannerInterfaceView private var supportedBarcodeType: [AVMetadataObject.ObjectType] = [.upce, .code39, .code39Mod43, .ean13, .ean8, .code93, @@ -49,6 +50,7 @@ class CameraView: UIView { @objc var scanBarcode = false @objc var showFrame = false @objc var onReadCode: RCTDirectEventBlock? + @objc var scanThrottleDelay = 2000 @objc var frameColor: UIColor? @objc var laserColor: UIColor? // other @@ -58,6 +60,28 @@ class CameraView: UIView { @objc var focusMode: FocusMode = .on @objc var zoomMode: ZoomMode = .on + // MARK: - Setup + + // This is used to delay camera setup until we have both granted permission & received default props + var hasCameraBeenSetup = false + var hasPropBeenSetup = false { + didSet { + setupCamera() + } + } + var hasPermissionBeenGranted = false { + didSet { + setupCamera() + } + } + + private func setupCamera() { + if (hasPropBeenSetup && hasPermissionBeenGranted && !hasCameraBeenSetup) { + hasCameraBeenSetup = true + camera.setup(cameraType: cameraType, supportedBarcodeType: scanBarcode && onReadCode != nil ? supportedBarcodeType : []) + } + } + // MARK: Lifecycle @available(*, unavailable) @@ -112,7 +136,6 @@ class CameraView: UIView { } deinit { -// removeObservers() UIDevice.current.endGeneratingDeviceOrientationNotifications() } @@ -145,6 +168,9 @@ class CameraView: UIView { if #available(iOS 12.0, *) { os_signpost(.begin, log: log, name: "didSetProps") } + + hasPropBeenSetup = true + print("------ didSetProps \(changedProps) \(Thread.current)") // Camera settings @@ -181,7 +207,7 @@ class CameraView: UIView { if changedProps.contains("scanBarcode") || changedProps.contains("onReadCode") { camera.isBarcodeScannerEnabled(scanBarcode, supportedBarcodeType: supportedBarcodeType, - onReadCode: onReadCode) + onBarcodeRead: { [weak self] barcode in self?.onBarcodeRead(barcode: barcode) }) } if changedProps.contains("showFrame") || changedProps.contains("scanBarcode") { @@ -259,13 +285,13 @@ class CameraView: UIView { switch AVCaptureDevice.authorizationStatus(for: .video) { case .authorized: // The user has previously granted access to the camera. - camera.setup() + hasPermissionBeenGranted = true break case .notDetermined: // The user has not yet been presented with the option to grant video access. 
AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in if granted { - self?.camera.setup() + self?.hasPermissionBeenGranted = true } } default: @@ -311,6 +337,18 @@ class CameraView: UIView { onOrientationChange(["orientation": orientation.rawValue]) } + private func onBarcodeRead(barcode: String) { + // Throttle barcode detection + let now = Date.timeIntervalSinceReferenceDate + guard lastBarcodeDetectedTime + Double(scanThrottleDelay) / 1000 < now else { + return + } + + lastBarcodeDetectedTime = now + + onReadCode?(["codeStringValue": barcode]) + } + // MARK: - Gesture selectors @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { diff --git a/ios/ReactNativeCameraKit/FocusInterfaceView.swift b/ios/ReactNativeCameraKit/FocusInterfaceView.swift index 10856eae62..f873e8c4aa 100644 --- a/ios/ReactNativeCameraKit/FocusInterfaceView.swift +++ b/ios/ReactNativeCameraKit/FocusInterfaceView.swift @@ -73,11 +73,6 @@ class FocusInterfaceView: UIView { isUserInteractionEnabled = true } - override func touchesBegan(_ touches: Set, with event: UIEvent?) { - super.touchesBegan(touches, with: event) - print(touches) - } - @available(*, unavailable) required init?(coder aDecoder: NSCoder) { fatalError("init(coder:) has not been implemented") diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 5629543f96..e852f3e1e8 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -33,7 +33,8 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var torchMode: TorchMode = .off private var resetFocus: (() -> Void)? private var focusFinished: (() -> Void)? - private var onReadCode: RCTDirectEventBlock? + private var onBarcodeRead: ((_ barcode: String) -> Void)? + private var scannerFrameSize: CGRect? = nil // KVO observation private var adjustingFocusObservation: NSKeyValueObservation? @@ -76,17 +77,19 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // MARK: - Public - func setup() { + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { + self.cameraType = cameraType + if #available(iOS 12.0, *) { os_signpost(.begin, log: log, name: "setup") } print("setup \(Thread.current)") - session.sessionPreset = .photo - - cameraPreview.session = session - cameraPreview.previewLayer.videoGravity = .resizeAspectFill + DispatchQueue.main.async { + self.cameraPreview.session = self.session + self.cameraPreview.previewLayer.videoGravity = .resizeAspectFill + } // Setup the capture session. // In general, it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. 
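The comment above states the threading rule the whole file follows: every `AVCaptureSession` mutation, and the blocking `startRunning()` call, happens on one dedicated serial queue. Boiled down to a self-contained sketch (class name and queue label are placeholders):

```swift
import AVFoundation

// Placeholder names; the pattern is the one RealCamera uses above.
final class SessionController {
    private let session = AVCaptureSession()
    private let sessionQueue = DispatchQueue(label: "camera.session.queue")

    func start() {
        sessionQueue.async {
            // Mutating the session is only safe from this one serial queue.
            self.session.beginConfiguration()
            self.session.sessionPreset = .photo
            self.session.commitConfiguration()

            // startRunning() blocks until capture is live: fine here,
            // but it would stall the UI if called on the main queue.
            self.session.startRunning()
        }
    }
}
```

`RealCamera` applies the same rule to later mutations as well, which is why `update(torchMode:)`, `update(cameraType:)`, and the scanner updates all hop onto `sessionQueue` first.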
@@ -98,7 +101,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega os_signpost(.event, log: log, name: "Processing", "setupCaptureSession") } - self.setupResult = self.setupCaptureSession() + self.setupResult = self.setupCaptureSession(supportedBarcodeType: supportedBarcodeType) self.addObservers() @@ -111,6 +114,9 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega if #available(iOS 12.0, *) { os_signpost(.event, log: log, name: "Processing", "finished startRunning") } + + // We need to reapply the configuration after starting the camera + self.update(torchMode: self.torchMode) } if #available(iOS 12.0, *) { @@ -160,7 +166,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func update(torchMode: TorchMode) { self.torchMode = torchMode - sessionQueue.async { + sessionQueue.asyncAfter(deadline: .now() + 0.1) { if #available(iOS 12.0, *) { os_signpost(.begin, log: log, name: "torchMode") } @@ -221,6 +227,9 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.session.commitConfiguration() self.addObservers() + // We need to reapply the configuration after reloading the camera + self.update(torchMode: self.torchMode) + if #available(iOS 12.0, *) { os_signpost(.end, log: log, name: "cameraType") } @@ -270,8 +279,8 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func isBarcodeScannerEnabled(_ isEnabled: Bool, supportedBarcodeType: [AVMetadataObject.ObjectType], - onReadCode: RCTDirectEventBlock?) { - self.onReadCode = onReadCode + onBarcodeRead: ((_ barcode: String) -> Void)?) { + self.onBarcodeRead = onBarcodeRead sessionQueue.async { if #available(iOS 12.0, *) { @@ -280,36 +289,24 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega print("--------- isBarcodeScannerEnabled") - if isEnabled && onReadCode != nil { + let newTypes: [AVMetadataObject.ObjectType] + if isEnabled && onBarcodeRead != nil { let availableTypes = self.metadataOutput.availableMetadataObjectTypes - let filtered = supportedBarcodeType.filter { type in availableTypes.contains(type) } - self.metadataOutput.metadataObjectTypes = self.metadataOutput.availableMetadataObjectTypes // filtered + newTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) } } else { - self.metadataOutput.metadataObjectTypes = [] + newTypes = [] } - // if isEnabled && self.metadataOutput == nil { - // self.session.beginConfiguration() - // - // let metadataOutput = AVCaptureMetadataOutput() - // if self.session.canAddOutput(metadataOutput) { - // self.session.addOutput(metadataOutput) - // metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main) - // - // let availableTypes = metadataOutput.availableMetadataObjectTypes - // metadataOutput.metadataObjectTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) } - // - // self.metadataOutput = metadataOutput - // } - // - // self.session.commitConfiguration() - // } else if !isEnabled, let metadataOutput { - // self.session.beginConfiguration() - // self.session.removeOutput(metadataOutput) - // self.session.commitConfiguration() - // - // self.metadataOutput = nil - // } + if self.metadataOutput.metadataObjectTypes != newTypes { + if #available(iOS 12.0, *) { + os_signpost(.event, log: log, name: "update metadataObjectTypes") + } + + self.metadataOutput.metadataObjectTypes = newTypes + + // Setting metadataObjectTypes reloads the camera, we need to 
reapply the configuration + self.update(torchMode: self.torchMode) + } if #available(iOS 12.0, *) { os_signpost(.end, log: log, name: "isBarcodeScannerEnabled") @@ -318,22 +315,42 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } func update(scannerFrameSize: CGRect?) { + guard self.scannerFrameSize != scannerFrameSize else { return } + + self.scannerFrameSize = scannerFrameSize + self.sessionQueue.async { if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "scannerFrameSize") + os_signpost(.begin, log: log, name: "scannerFrameSize") } if !self.session.isRunning { print("setting rectOfInterest while session not running wouldn't work") + if #available(iOS 12.0, *) { + os_signpost(.end, log: log, name: "scannerFrameSize") + } return } DispatchQueue.main.async { let visibleRect = scannerFrameSize != nil && scannerFrameSize != .zero ? self.cameraPreview.previewLayer.metadataOutputRectConverted(fromLayerRect: scannerFrameSize!) : nil - print("------ update scannerFrameSize \(visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1))") self.sessionQueue.async { + if (self.metadataOutput.rectOfInterest == visibleRect) { + if #available(iOS 12.0, *) { + os_signpost(.end, log: log, name: "scannerFrameSize") + } + return + } + + print("------ update scannerFrameSize from \(self.metadataOutput.rectOfInterest) to \(visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1))") + + if #available(iOS 12.0, *) { + os_signpost(.event, log: log, name: "update scannerFrameSize") + } self.metadataOutput.rectOfInterest = visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1) + // We need to reapply the configuration after touching the metadataOutput + self.update(torchMode: self.torchMode) if #available(iOS 12.0, *) { os_signpost(.end, log: log, name: "scannerFrameSize") @@ -354,13 +371,12 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega print("----------- \(codeStringValue)") - onReadCode?(["codeStringValue": codeStringValue]) - // check code is different? should pause few seconds instead and allow user to scan a second time + onBarcodeRead?(codeStringValue) } // MARK: - Private - private func setupCaptureSession() -> SetupResult { + private func setupCaptureSession(supportedBarcodeType: [AVMetadataObject.ObjectType]) -> SetupResult { guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition), let videoDeviceInput = try? 
AVCaptureDeviceInput(device: videoDevice) else { return .sessionConfigurationFailed @@ -368,6 +384,8 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega session.beginConfiguration() + session.sessionPreset = .photo + if session.canAddInput(videoDeviceInput) { session.addInput(videoDeviceInput) self.videoDeviceInput = videoDeviceInput @@ -385,7 +403,9 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.session.addOutput(metadataOutput) metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main) - metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes + let availableTypes = self.metadataOutput.availableMetadataObjectTypes + let filteredTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) } + metadataOutput.metadataObjectTypes = filteredTypes } session.commitConfiguration() diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index 197a2c8a6e..32b526e716 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -14,8 +14,6 @@ import os.signpost class SimulatorCamera: CameraProtocol { var previewView: UIView { mockPreview } - var onReadCode: RCTDirectEventBlock? - private var fakeFocusFinishedTimer: Timer? // Create mock camera layer. When a photo is taken, we capture this layer and save it in place of a hardware input. @@ -23,7 +21,11 @@ class SimulatorCamera: CameraProtocol { // MARK: - Public - func setup() {} + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { + DispatchQueue.main.async { + self.mockPreview.cameraTypeLabel.text = "Camera type: \(cameraType)" + } + } func cameraRemovedFromSuperview() {} func update(zoomVelocity: CGFloat) { @@ -68,7 +70,7 @@ class SimulatorCamera: CameraProtocol { func isBarcodeScannerEnabled(_ isEnabled: Bool, supportedBarcodeType: [AVMetadataObject.ObjectType], - onReadCode: RCTDirectEventBlock?) {} + onBarcodeRead: ((_ barcode: String) -> Void)?) {} func update(scannerFrameSize: CGRect?) {} func capturePicture(onWillCapture: @escaping () -> Void, diff --git a/src/Camera.ios.tsx b/src/Camera.ios.tsx index 3762e76d67..614781e75b 100644 --- a/src/Camera.ios.tsx +++ b/src/Camera.ios.tsx @@ -1,6 +1,5 @@ -import _cloneDeep from 'lodash/cloneDeep'; import React from 'react'; -import { requireNativeComponent, NativeModules, processColor } from 'react-native'; +import { requireNativeComponent, NativeModules } from 'react-native'; import { CameraApi } from './types'; import { CameraProps } from './Camera'; @@ -25,10 +24,7 @@ const Camera = React.forwardRef((props: CameraProps, ref: any) => { }, })); - const transformedProps: CameraProps = _cloneDeep(props); - transformedProps.ratioOverlayColor = processColor(props.ratioOverlayColor); - - return ; + return ; }); Camera.defaultProps = { diff --git a/src/CameraScreen.tsx b/src/CameraScreen.tsx index e69f8ee82b..1e41ccece8 100644 --- a/src/CameraScreen.tsx +++ b/src/CameraScreen.tsx @@ -126,9 +126,9 @@ export default class CameraScreen extends Component { } componentDidMount() { - let ratios: string[] = []; + let ratios: string[] = this.props.ratioOverlay ? 
[this.props.ratioOverlay] : []; if (this.props.cameraRatioOverlay) { - ratios = this.props.cameraRatioOverlay.ratios || []; + ratios = ratios.concat(this.props.cameraRatioOverlay.ratios || []); } // eslint-disable-next-line react/no-did-mount-set-state this.setState({ @@ -264,7 +264,7 @@ export default class CameraScreen extends Component { style={{ flex: 1, flexDirection: 'row', justifyContent: 'flex-end', alignItems: 'center', padding: 8 }} onPress={() => this.onRatioButtonPressed()} > - {this.props.ratioOverlay} + {this.state.ratios[this.state.ratioArrayPosition]} From a23aa47fa76023db357d2170fabcbbed4e188ee0 Mon Sep 17 00:00:00 2001 From: David Bertet Date: Fri, 30 Jun 2023 00:29:36 -0700 Subject: [PATCH 05/20] Remove prints & signpost --- ios/ReactNativeCameraKit/CameraView.swift | 29 ------ .../RatioOverlayView.swift | 1 - ios/ReactNativeCameraKit/RealCamera.swift | 98 ------------------- .../ScannerInterfaceView.swift | 3 - .../SimulatorCamera.swift | 2 - .../SimulatorPreviewView.swift | 1 - 6 files changed, 134 deletions(-) diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index b2f80367df..71e9e9ded6 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -6,14 +6,6 @@ import AVFoundation import UIKit -import os.signpost - -@available(iOS 12.0, *) -let log = OSLog( - subsystem: "com.bertet.app", - category: .pointsOfInterest -) - /* * View abtracting the logic unrelated to the actual camera * Like permission, ratio overlay, focus, zoom gesture, write image, etc @@ -90,11 +82,6 @@ class CameraView: UIView { } override init(frame: CGRect) { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "init CameraView") - } - - print("--------- init CameraView") #if targetEnvironment(simulator) camera = SimulatorCamera() #else @@ -122,9 +109,6 @@ class CameraView: UIView { using: { [weak self] notification in self?.orientationChanged(notification: notification) }) handleCameraPermission() - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "init CameraView") - } } override func removeFromSuperview() { @@ -144,8 +128,6 @@ class CameraView: UIView { override func reactSetFrame(_ frame: CGRect) { super.reactSetFrame(frame) - print("---------- CameraView reactSetFrame \(frame) \(bounds) \(Thread.current)") - camera.previewView.frame = bounds scannerInterfaceView.frame = bounds @@ -158,21 +140,14 @@ class CameraView: UIView { } override func removeReactSubview(_ subview: UIView) { - print("---------- removeReactSubview \(subview)") subview.removeFromSuperview() super.removeReactSubview(subview) } // Called once when all props have been set, then every time one is updated override func didSetProps(_ changedProps: [String]) { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "didSetProps") - } - hasPropBeenSetup = true - print("------ didSetProps \(changedProps) \(Thread.current)") - // Camera settings if changedProps.contains("cameraType") { camera.update(cameraType: cameraType) @@ -251,10 +226,6 @@ class CameraView: UIView { } } } - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "didSetProps") - } } // MARK: Public diff --git a/ios/ReactNativeCameraKit/RatioOverlayView.swift b/ios/ReactNativeCameraKit/RatioOverlayView.swift index 242a2a6749..f65cc7a414 100644 --- a/ios/ReactNativeCameraKit/RatioOverlayView.swift +++ b/ios/ReactNativeCameraKit/RatioOverlayView.swift @@ -82,7 +82,6 @@ class RatioOverlayView: UIView { } func 
setColor(_ color: UIColor) { - print("setColor \(color)") topView.backgroundColor = color bottomView.backgroundColor = color } diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index e852f3e1e8..95f527ba23 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -6,8 +6,6 @@ import AVFoundation import UIKit -import os.signpost - /* * Real camera implementation that uses AVFoundation */ @@ -54,24 +52,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func cameraRemovedFromSuperview() { sessionQueue.async { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "stopRunning") - } - if self.setupResult == .success { - print("------- stop running \(Thread.current)") self.session.stopRunning() self.removeObservers() } - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "stopRunning") - } } } deinit { - print("------- deinit RealCamera \(Thread.current)") removeObservers() } @@ -80,12 +68,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { self.cameraType = cameraType - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "setup") - } - - print("setup \(Thread.current)") - DispatchQueue.main.async { self.cameraPreview.session = self.session self.cameraPreview.previewLayer.videoGravity = .resizeAspectFill @@ -97,31 +79,16 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue // so that the main queue isn't blocked, which keeps the UI responsive. 
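Several hunks in this file reapply the torch through `device.setTorchMode(_:)` after the session is reconfigured. AVFoundation itself only exposes a writable `torchMode` property guarded by `lockForConfiguration()`, so that call is presumably a small helper defined elsewhere in the codebase; a sketch of what it might look like (the guard and error handling are assumptions):

```swift
import AVFoundation

// Hypothetical helper: AVCaptureDevice has no setTorchMode(_:) of its own,
// only a torchMode property that must be set between lock/unlock calls.
extension AVCaptureDevice {
    func setTorchMode(_ mode: AVCaptureDevice.TorchMode) {
        guard hasTorch, isTorchModeSupported(mode) else { return }
        do {
            try lockForConfiguration()
            torchMode = mode
            unlockForConfiguration()
        } catch {
            print("Could not lock device for torch configuration: \(error)")
        }
    }
}
```

Whatever the real helper looks like, the important part is that the lock and unlock calls bracket every `torchMode` write.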
sessionQueue.async { - if #available(iOS 12.0, *) { - os_signpost(.event, log: log, name: "Processing", "setupCaptureSession") - } - self.setupResult = self.setupCaptureSession(supportedBarcodeType: supportedBarcodeType) self.addObservers() if self.setupResult == .success { - print("---- startRunning") - if #available(iOS 12.0, *) { - os_signpost(.event, log: log, name: "Processing", "startRunning") - } self.session.startRunning() - if #available(iOS 12.0, *) { - os_signpost(.event, log: log, name: "Processing", "finished startRunning") - } // We need to reapply the configuration after starting the camera self.update(torchMode: self.torchMode) } - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "setup") - } } } @@ -136,10 +103,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior) { DispatchQueue.main.async { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "focusat") - } - let devicePoint = self.cameraPreview.previewLayer.captureDevicePointConverted(fromLayerPoint: touchPoint) self.sessionQueue.async { @@ -155,10 +118,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega exposeWithMode: focusBehavior.exposureMode, atDevicePoint: devicePoint, isSubjectAreaChangeMonitoringEnabled: focusBehavior.isSubjectAreaChangeMonitoringEnabled) - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "focusat") - } } } } @@ -167,18 +126,9 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.torchMode = torchMode sessionQueue.asyncAfter(deadline: .now() + 0.1) { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "torchMode") - } - if (self.videoDeviceInput?.device.torchMode != torchMode.avTorchMode) { - print("update torchMode from from \(self.videoDeviceInput?.device.torchMode.rawValue) to \(torchMode.avTorchMode.rawValue)") self.videoDeviceInput?.device.setTorchMode(torchMode.avTorchMode) } - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "torchMode") - } } } @@ -189,19 +139,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func update(cameraType: CameraType) { self.cameraType = cameraType - if #available(iOS 12.0, *) { - os_signpost(.event, log: log, name: "update cameraType") - } - sessionQueue.async { if self.videoDeviceInput?.device.position == cameraType.avPosition { return } - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "cameraType") - } - // Avoid chaining device inputs when camera input is denied by the user, since both front and rear vido input devices will be nil guard self.setupResult == .success, let currentViewDeviceInput = self.videoDeviceInput, @@ -229,10 +171,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // We need to reapply the configuration after reloading the camera self.update(torchMode: self.torchMode) - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "cameraType") - } } } @@ -283,12 +221,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.onBarcodeRead = onBarcodeRead sessionQueue.async { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "isBarcodeScannerEnabled") - } - - print("--------- isBarcodeScannerEnabled") - let newTypes: [AVMetadataObject.ObjectType] if isEnabled && onBarcodeRead != nil { let availableTypes = 
self.metadataOutput.availableMetadataObjectTypes @@ -298,19 +230,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } if self.metadataOutput.metadataObjectTypes != newTypes { - if #available(iOS 12.0, *) { - os_signpost(.event, log: log, name: "update metadataObjectTypes") - } - self.metadataOutput.metadataObjectTypes = newTypes // Setting metadataObjectTypes reloads the camera, we need to reapply the configuration self.update(torchMode: self.torchMode) } - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "isBarcodeScannerEnabled") - } } } @@ -320,15 +244,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.scannerFrameSize = scannerFrameSize self.sessionQueue.async { - if #available(iOS 12.0, *) { - os_signpost(.begin, log: log, name: "scannerFrameSize") - } - if !self.session.isRunning { - print("setting rectOfInterest while session not running wouldn't work") - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "scannerFrameSize") - } return } @@ -337,24 +253,12 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.sessionQueue.async { if (self.metadataOutput.rectOfInterest == visibleRect) { - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "scannerFrameSize") - } return } - print("------ update scannerFrameSize from \(self.metadataOutput.rectOfInterest) to \(visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1))") - - if #available(iOS 12.0, *) { - os_signpost(.event, log: log, name: "update scannerFrameSize") - } self.metadataOutput.rectOfInterest = visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1) // We need to reapply the configuration after touching the metadataOutput self.update(torchMode: self.torchMode) - - if #available(iOS 12.0, *) { - os_signpost(.end, log: log, name: "scannerFrameSize") - } } } } @@ -369,8 +273,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega return } - print("----------- \(codeStringValue)") - onBarcodeRead?(codeStringValue) } diff --git a/ios/ReactNativeCameraKit/ScannerInterfaceView.swift b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift index 38d43e92a7..dd6a6c2b54 100644 --- a/ios/ReactNativeCameraKit/ScannerInterfaceView.swift +++ b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift @@ -49,15 +49,12 @@ class ScannerInterfaceView: UIView { frameView.frame = CGRect(x: 0, y: 0, width: bounds.size.width - 2 * frameOffset, height: frameHeight) frameView.center = center - print("--- draw rect scannerInterface \(frameView.frame)") - updateOverlaySize(frameView.frame) } // MARK: - Public var frameSize: CGRect { - print("--- frameView.frame \(frameView.frame)") return frameView.frame } diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index 32b526e716..c4586b28ab 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -6,8 +6,6 @@ import AVFoundation import UIKit -import os.signpost - /* * Fake camera implementation to be used on simulator */ diff --git a/ios/ReactNativeCameraKit/SimulatorPreviewView.swift b/ios/ReactNativeCameraKit/SimulatorPreviewView.swift index bc33d24ad7..2da8d3a215 100644 --- a/ios/ReactNativeCameraKit/SimulatorPreviewView.swift +++ b/ios/ReactNativeCameraKit/SimulatorPreviewView.swift @@ -75,7 +75,6 @@ class SimulatorPreviewView: UIView { } func randomize() { - print("randomize \(Thread.current)") layer.backgroundColor 
= UIColor(hue: CGFloat(Double.random(in: 0...1)), saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor balloonLayer.removeFromSuperlayer() balloonLayer = CALayer() From 01c23678a30d16a200c47fbc37457be1ac640eb8 Mon Sep 17 00:00:00 2001 From: David Bertet Date: Fri, 30 Jun 2023 01:18:01 -0700 Subject: [PATCH 06/20] Remove unused protocol --- ios/ReactNativeCameraKit/RealCamera.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 95f527ba23..a4857c3fc6 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -9,7 +9,7 @@ import UIKit /* * Real camera implementation that uses AVFoundation */ -class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate, AVCapturePhotoCaptureDelegate { +class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate { var previewView: UIView { cameraPreview } private let cameraPreview = RealPreviewView(frame: .zero) From 5a7df3ee804c189570099426b43cc9676ee6b0b5 Mon Sep 17 00:00:00 2001 From: Seph Soliman Date: Mon, 3 Jul 2023 20:21:01 -0700 Subject: [PATCH 07/20] Removed CameraScreen --- README.md | 60 +--- example/src/App.tsx | 8 +- example/src/CameraScreenExample.tsx | 34 --- src/CameraScreen.tsx | 443 ---------------------------- src/index.ts | 3 +- 5 files changed, 3 insertions(+), 545 deletions(-) delete mode 100644 example/src/CameraScreenExample.tsx delete mode 100644 src/CameraScreen.tsx diff --git a/README.md b/README.md index 9e1980fdd9..bf2e265a3c 100644 --- a/README.md +++ b/README.md @@ -74,43 +74,6 @@ Add the following usage descriptions to your `Info.plist` (usually found at: `io ## Components -### CameraScreen - -Full screen camera component that holds camera state and provides common camera controls. 
Works for most needs - -```ts -import { CameraScreen } from 'react-native-camera-kit'; -``` - -```tsx - this.onBottomButtonPressed(event)} - flashImages={{ - // optional, images for flash state button - on: require('path/to/image'), - off: require('path/to/image'), - auto: require('path/to/image'), - }} - flashImageStyle={} // optional, ImageStyle applied to flashImages - cameraFlipImage={require('path/to/image')} // optional, image for flipping camera button - cameraFlipImageStyle={} // optional, ImageStyle applied to cameraFlipImage - captureButtonImage={require('path/to/image')} // optional, image capture button - captureButtonImageStyle={} // optional, ImageStyle applied to captureButtonImage - torchOnImage={require('path/to/image')} // optional, image for toggling on flash light - torchOffImage={require('path/to/image')} // optional, image for toggling off flash light - torchImageStyle={} // optional, ImageStyle applied to torchImage - hideControls={false} // (default false) optional, hides camera controls - showCapturedImageCount={false} // (default false) optional, show count for photos taken during that capture session - cameraRatioOverlay // optional - allowCaptureRetake={false} // (default false) optional, ask for picture validation -/> -``` - ### Camera Barebones camera component if you need advanced/customized interface @@ -129,7 +92,7 @@ import { Camera, CameraType } from 'react-native-camera-kit'; #### Barcode / QR Code Scanning -Additionally, the Camera / CameraScreen can be used for barcode scanning +Additionally, the Camera can be used for barcode scanning ```tsx ``` -### CameraScreen Props (Optional) - -| Props | Type | Description | -| ------------------------- | ------------------------------------ | ------------------------------------------------------------------------------------------ | -| All Camera Props | | | -| `actions` | Actions | Labels for the buttons visible on screen | -| `onBottomButtonPressed` | `(event: BottomPressedData) => void` | Callback called when a button is pressed. `BottomPressedData` contains the data to consume | -| `flashImages` | FlashImages | Images for flash state button. Default: none, button is hidden | -| `flashImageStyle` | ImageStyle | ImageStyle applied to flashImages | -| `cameraFlipImage` | ImageSourcePropType | Image for flipping camera button. Default: none, button is hidden | -| `cameraFlipImageStyle` | ImageStyle | ImageStyle applied to cameraFlipImage | -| `captureButtonImage` | ImageSourcePropType | Image for capture button. Default: none, button is hidden | -| `captureButtonImageStyle` | ImageStyle | ImageStyle applied to captureButtonImage | -| `torchOnImage` | ImageSourcePropType | Image for toggling on flash light. Default: none, button is hidden | -| `torchOffImage` | ImageSourcePropType | Image for toggling off flash light. Default: none, button is hidden | -| `torchImageStyle` | ImageStyle | ImageStyle applied to torchOnImage/torchOffImage | -| `hideControls` | Boolean | Hides camera controls Default: `false` | -| `showCapturedImageCount` | Boolean | Show count for photos taken during that capture session. Default: `false` | -| `cameraRatioOverlay` | CameraRatioOverlay | | -| `allowCaptureRetake` | Boolean | Ask for picture validation. 
Default: `false` | - ### Camera Props (Optional) | Props | Type | Description | diff --git a/example/src/App.tsx b/example/src/App.tsx index 6db9121d12..51856031a3 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -6,12 +6,11 @@ import { TouchableOpacity, } from 'react-native'; -import CameraScreenExample from './CameraScreenExample'; import BarcodeScreenExample from './BarcodeScreenExample'; import CameraExample from './CameraExample'; type State = { - example?: CameraExample | CameraScreenExample | BarcodeScreenExample; + example?: any; } export default class App extends Component { @@ -43,11 +42,6 @@ export default class App extends Component { Camera - this.setState({ example: CameraScreenExample })}> - - Camera Screen - - this.setState({ example: BarcodeScreenExample })}> Barcode Scanner diff --git a/example/src/CameraScreenExample.tsx b/example/src/CameraScreenExample.tsx deleted file mode 100644 index 0ea9522cc9..0000000000 --- a/example/src/CameraScreenExample.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, { Component } from 'react'; -import { Alert } from 'react-native'; -import CameraScreen from '../../src/CameraScreen'; - -export default class CameraScreenExample extends Component { - onBottomButtonPressed(event) { - const captureImages = JSON.stringify(event.captureImages); - Alert.alert( - `"${event.type}" Button Pressed`, - `${captureImages}`, - [{ text: 'OK', onPress: () => console.log('OK Pressed') }], - { cancelable: false }, - ); - } - - render() { - return ( - this.onBottomButtonPressed(event)} - flashImages={{ - on: require('../images/flashOn.png'), - off: require('../images/flashOff.png'), - auto: require('../images/flashAuto.png'), - }} - cameraFlipImage={require('../images/cameraFlipIcon.png')} - captureButtonImage={require('../images/cameraButton.png')} - torchOnImage={require('../images/torchOn.png')} - torchOffImage={require('../images/torchOff.png')} - showCapturedImageCount - /> - ); - } -} diff --git a/src/CameraScreen.tsx b/src/CameraScreen.tsx deleted file mode 100644 index 1e41ccece8..0000000000 --- a/src/CameraScreen.tsx +++ /dev/null @@ -1,443 +0,0 @@ -import PropTypes from 'prop-types'; -import React, { Component } from 'react'; -import { - StyleSheet, - Text, - View, - TouchableOpacity, - Image, - Dimensions, - Platform, - SafeAreaView, - ImageStyle, - ImageSourcePropType, -} from 'react-native'; -import _ from 'lodash'; -import Camera, { CameraProps } from './Camera'; -import { CameraApi, CameraType, CaptureData, FlashMode } from './types'; - -const { width, height } = Dimensions.get('window'); - -type Actions = { - leftButtonText?: string; - leftCaptureRetakeButtonText?: string; -}; - -type CameraRatioOverlay = { - ratios: string[]; -}; - -type FlashImages = { - on: ImageSourcePropType; - off: ImageSourcePropType; - auto: ImageSourcePropType; -}; - -type BottomButtonTypes = 'left' | 'capture'; - -type BottomPressedData = { - type: BottomButtonTypes; - captureImages: CaptureData[]; - captureRetakeMode: boolean; - image?: CaptureData; -}; - -type CameraScreenProps = CameraProps & { - // Controls - actions?: Actions; - flashImages?: FlashImages; - flashImageStyle?: ImageStyle; - torchOnImage?: ImageSourcePropType; - torchOffImage?: ImageSourcePropType; - torchImageStyle?: ImageStyle; - captureButtonImage?: ImageSourcePropType; - captureButtonImageStyle?: ImageStyle; - cameraFlipImage?: ImageSourcePropType; - cameraFlipImageStyle?: ImageStyle; - hideControls?: boolean; - onBottomButtonPressed?: (event: BottomPressedData) => void; - // 
Overlay - cameraRatioOverlay?: CameraRatioOverlay; - showCapturedImageCount?: boolean; - // Behavior - allowCaptureRetake?: boolean; -}; - -type FlashData = { - mode: FlashMode; - image?: ImageSourcePropType; -}; - -type State = { - captureImages: CaptureData[]; - flashData?: FlashData; - torchMode: boolean; - ratios: string[]; - ratioArrayPosition: number; - imageCaptured?: CaptureData; - captured: boolean; - cameraType: CameraType; -}; - -export default class CameraScreen extends Component { - static propTypes = { - allowCaptureRetake: PropTypes.bool, - }; - - static defaultProps = { - allowCaptureRetake: false, - }; - - currentFlashArrayPosition: number; - flashArray: FlashData[]; - camera: CameraApi; - - constructor(props: CameraScreenProps) { - super(props); - this.flashArray = [ - { - mode: 'auto', - image: props.flashImages?.auto, - }, - { - mode: 'on', - image: props.flashImages?.on, - }, - { - mode: 'off', - image: props.flashImages?.off, - }, - ]; - - this.currentFlashArrayPosition = this.props.flashMode - ? this.flashArray.findIndex((flashData) => flashData.mode === this.props.flashMode) - : 0; - - this.state = { - captureImages: [], - flashData: this.flashArray[this.currentFlashArrayPosition], - torchMode: this.props.torchMode === 'on' || false, - ratios: [], - ratioArrayPosition: -1, - imageCaptured: undefined, - captured: false, - cameraType: this.props.cameraType || CameraType.Back, - }; - } - - componentDidMount() { - let ratios: string[] = this.props.ratioOverlay ? [this.props.ratioOverlay] : []; - if (this.props.cameraRatioOverlay) { - ratios = ratios.concat(this.props.cameraRatioOverlay.ratios || []); - } - // eslint-disable-next-line react/no-did-mount-set-state - this.setState({ - ratios: ratios, - ratioArrayPosition: ratios.length > 0 ? 0 : -1, - }); - } - - isCaptureRetakeMode() { - return !!(this.props.allowCaptureRetake && !_.isUndefined(this.state.imageCaptured)); - } - - renderFlashButton() { - return ( - this.state.flashData?.image && - !this.isCaptureRetakeMode() && ( - this.onSetFlash()}> - - - ) - ); - } - - renderTorchButton() { - return ( - this.props.torchOnImage && - this.props.torchOffImage && - !this.isCaptureRetakeMode() && ( - this.onSetTorch()}> - - - ) - ); - } - - renderSwitchCameraButton() { - return ( - this.props.cameraFlipImage && - !this.isCaptureRetakeMode() && ( - this.onSwitchCameraPressed()}> - - - ) - ); - } - - renderTopButtons() { - return ( - !this.props.hideControls && ( - - {this.renderFlashButton()} - {this.renderSwitchCameraButton()} - {this.renderTorchButton()} - - ) - ); - } - - renderCamera() { - return ( - - {this.isCaptureRetakeMode() && this.state.imageCaptured ? ( - - ) : ( - (this.camera = cam)} - style={{ flex: 1, justifyContent: 'flex-end' }} - cameraType={this.state.cameraType} - flashMode={this.state.flashData?.mode} - torchMode={this.state.torchMode ? 
'on' : 'off'} - ratioOverlay={this.state.ratios[this.state.ratioArrayPosition]} - /> - )} - - ); - } - - numberOfImagesTaken() { - const numberTook = this.state.captureImages.length; - if (numberTook >= 2) { - return numberTook; - } else if (this.state.captured) { - return '1'; - } else { - return ''; - } - } - - renderCaptureButton() { - return ( - this.props.captureButtonImage && - !this.isCaptureRetakeMode() && ( - - this.onCaptureImagePressed()}> - - {this.props.showCapturedImageCount && ( - - {this.numberOfImagesTaken()} - - )} - - - ) - ); - } - - renderRatioStrip() { - if (this.state.ratios.length === 0 || this.props.hideControls) { - return null; - } - return ( - - - Your images look best at a {this.state.ratios[0] || ''} ratio - this.onRatioButtonPressed()} - > - {this.state.ratios[this.state.ratioArrayPosition]} - - - - ); - } - - sendBottomButtonPressedAction(type: BottomButtonTypes, captureRetakeMode: boolean, image?: CaptureData) { - if (this.props.onBottomButtonPressed) { - this.props.onBottomButtonPressed({ type, captureImages: this.state.captureImages, captureRetakeMode, image }); - } - } - - onBottomButtonPressed(type: BottomButtonTypes) { - const captureRetakeMode = this.isCaptureRetakeMode(); - if (captureRetakeMode) { - if (type === 'left') { - this.setState({ imageCaptured: undefined }); - } - } else { - this.sendBottomButtonPressedAction(type, captureRetakeMode, undefined); - } - } - - renderBottomButton(type: 'left') { - const showButton = true; - if (showButton) { - const buttonNameSuffix = this.isCaptureRetakeMode() ? 'CaptureRetakeButtonText' : 'ButtonText'; - const buttonText = _(this.props).get(`actions.${type}${buttonNameSuffix}`); - return ( - this.onBottomButtonPressed(type)} - > - {buttonText} - - ); - } else { - return ; - } - } - - renderBottomButtons() { - return ( - !this.props.hideControls && ( - - {this.renderBottomButton('left')} - {this.renderCaptureButton()} - - ) - ); - } - - onSwitchCameraPressed() { - const direction = this.state.cameraType === CameraType.Back ? 
CameraType.Front : CameraType.Back; - this.setState({ cameraType: direction }); - } - - onSetFlash() { - this.currentFlashArrayPosition = (this.currentFlashArrayPosition + 1) % 3; - const newFlashData = this.flashArray[this.currentFlashArrayPosition]; - this.setState({ flashData: newFlashData }); - } - - onSetTorch() { - this.setState({ torchMode: !this.state.torchMode }); - } - - async onCaptureImagePressed() { - const image = await this.camera.capture(); - - if (this.props.allowCaptureRetake) { - this.setState({ imageCaptured: image }); - } else { - if (image) { - this.setState({ - captured: true, - imageCaptured: image, - captureImages: _.concat(this.state.captureImages, image), - }); - } - this.sendBottomButtonPressedAction('capture', false, image); - } - } - - onRatioButtonPressed() { - const newRatiosArrayPosition = (this.state.ratioArrayPosition + 1) % this.state.ratios.length; - this.setState({ ratioArrayPosition: newRatiosArrayPosition }); - } - - render() { - return ( - - {Platform.OS === 'android' && this.renderCamera()} - {this.renderTopButtons()} - {Platform.OS !== 'android' && this.renderCamera()} - {this.renderRatioStrip()} - {Platform.OS === 'android' && } - {this.renderBottomButtons()} - - ); - } -} - -const styles = StyleSheet.create({ - bottomButtons: { - flex: 2, - flexDirection: 'row', - justifyContent: 'space-between', - padding: 14, - }, - textStyle: { - color: 'white', - fontSize: 20, - }, - ratioBestText: { - color: 'white', - fontSize: 18, - }, - ratioText: { - color: '#ffc233', - fontSize: 18, - }, - topButtons: { - flex: 1, - flexDirection: 'row', - justifyContent: 'space-between', - paddingTop: 8, - paddingBottom: 0, - }, - cameraContainer: { - ...Platform.select({ - android: { - position: 'absolute', - top: 0, - left: 0, - width, - height, - }, - default: { - flex: 10, - flexDirection: 'column', - }, - }), - }, - captureButtonContainer: { - flex: 1, - justifyContent: 'center', - alignItems: 'center', - }, - textNumberContainer: { - position: 'absolute', - top: 0, - left: 0, - bottom: 0, - right: 0, - justifyContent: 'center', - alignItems: 'center', - }, - bottomButton: { - flex: 1, - flexDirection: 'row', - alignItems: 'center', - padding: 10, - }, - bottomContainerGap: { - flex: 1, - flexDirection: 'row', - justifyContent: 'flex-end', - alignItems: 'center', - padding: 10, - }, - gap: { - flex: 10, - flexDirection: 'column', - }, -}); diff --git a/src/index.ts b/src/index.ts index 028369f1d3..1586cb1f22 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,7 +1,6 @@ import { NativeModules } from 'react-native'; import Camera from './Camera'; -import CameraScreen from './CameraScreen'; import type { CameraApi, CameraType, CaptureData, FlashMode, FocusMode, TorchMode, ZoomMode } from './types'; const { CameraKit } = NativeModules; @@ -16,4 +15,4 @@ export const Orientation = { export default CameraKit; -export { Camera, CameraScreen, CameraType, TorchMode, FlashMode, FocusMode, ZoomMode, CameraApi, CaptureData }; +export { Camera, CameraType, TorchMode, FlashMode, FocusMode, ZoomMode, CameraApi, CaptureData }; From 37737ace6f1aea9dc0d6a817d13fa1be6e00828d Mon Sep 17 00:00:00 2001 From: Seph Soliman Date: Mon, 3 Jul 2023 20:21:11 -0700 Subject: [PATCH 08/20] Rewrote example project Using hooks/functional components Fixed zoom on second pinch Fixed orientation issues Added support for new ultra wide camera (fix blurry close ups) --- example/ios/Podfile.lock | 2 +- example/metro.config.js | 2 +- example/src/App.tsx | 7 +- example/src/BarcodeScreenExample.tsx | 328 
+++++++++++++++--- example/src/CameraExample.tsx | 299 ++++++++++++++-- example/src/CheckingScreen.tsx | 46 --- ios/ReactNativeCameraKit/CameraProtocol.swift | 3 +- ios/ReactNativeCameraKit/CameraView.swift | 31 +- ios/ReactNativeCameraKit/RealCamera.swift | 170 +++++++-- .../SimulatorCamera.swift | 34 +- 10 files changed, 733 insertions(+), 189 deletions(-) delete mode 100644 example/src/CheckingScreen.tsx diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index 36bda45d31..caffb2a09c 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -626,4 +626,4 @@ SPEC CHECKSUMS: PODFILE CHECKSUM: d05c1bfc8fdb7ce1956205fcc5728007537b534c -COCOAPODS: 1.12.1 +COCOAPODS: 1.11.3 diff --git a/example/metro.config.js b/example/metro.config.js index d1f468ab03..c64ca56ddd 100644 --- a/example/metro.config.js +++ b/example/metro.config.js @@ -1,5 +1,5 @@ const path = require('path'); -const blacklist = require('metro-config/src/defaults/blacklist'); +const blacklist = require('metro-config/src/defaults/exclusionList'); const escape = require('escape-string-regexp'); const pak = require('../package.json'); diff --git a/example/src/App.tsx b/example/src/App.tsx index 51856031a3..598b3927ae 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -4,6 +4,7 @@ import { Text, View, TouchableOpacity, + ScrollView, } from 'react-native'; import BarcodeScreenExample from './BarcodeScreenExample'; @@ -26,10 +27,10 @@ export default class App extends Component { render() { if (this.state.example) { const Example = this.state.example; - return ; + return this.setState({ example: undefined })} />; } return ( - + 🎈 @@ -48,7 +49,7 @@ export default class App extends Component { - + ); } } diff --git a/example/src/BarcodeScreenExample.tsx b/example/src/BarcodeScreenExample.tsx index bd08eab611..800c3e242d 100644 --- a/example/src/BarcodeScreenExample.tsx +++ b/example/src/BarcodeScreenExample.tsx @@ -1,50 +1,282 @@ -import React, { Component } from 'react'; -import { Alert } from 'react-native'; -import CameraScreen from '../../src/CameraScreen'; -import CheckingScreen from './CheckingScreen'; - -export default class BarcodeScreenExample extends Component { - constructor(props) { - super(props); - this.state = { - example: undefined, - value: undefined, +import React, { useState, useRef, useEffect } from 'react'; +import { + StyleSheet, + Text, + View, + TouchableOpacity, + Image, + Dimensions, + Platform, + SafeAreaView, + useWindowDimensions, + Vibration, +} from 'react-native'; +import Camera from '../../src/Camera'; +import { CameraApi, CameraType, CaptureData } from '../../src/types'; + +const { width, height } = Dimensions.get('window'); + +const flashImages = { + on: require('../images/flashOn.png'), + off: require('../images/flashOff.png'), + auto: require('../images/flashAuto.png'), +}; + +const flashArray = [ + { + mode: 'auto', + image: flashImages.auto, + }, + { + mode: 'on', + image: flashImages.on, + }, + { + mode: 'off', + image: flashImages.off, + }, +] as const; + +const BarcodeExample = ({ onBack }: { onBack: () => void }) => { + const cameraRef = useRef(null); + const [currentFlashArrayPosition, setCurrentFlashArrayPosition] = useState(0); + const [captureImages, setCaptureImages] = useState([]); + const [flashData, setFlashData] = useState(flashArray[currentFlashArrayPosition]); + const [torchMode, setTorchMode] = useState(false); + // const [ratios, setRatios] = useState([]); + // const [ratioArrayPosition, setRatioArrayPosition] = useState(-1); + const [captured, 
setCaptured] = useState(false); + const [cameraType, setCameraType] = useState(CameraType.Back); + const [barcode, setBarcode] = useState(''); + + useEffect(() => { + const t = setTimeout(() => { + setBarcode(''); + }, 2000); + return () => { + clearTimeout(t); }; - } - - onBottomButtonPressed(event) { - const captureImages = JSON.stringify(event.captureImages); - Alert.alert( - `"${event.type}" Button Pressed`, - `${captureImages}`, - [{ text: 'OK', onPress: () => console.log('OK Pressed') }], - { cancelable: false }, - ); - } - - render() { - if (this.state.example) { - const Screen = this.state.example; - return ; + }, [barcode]); + + // useEffect(() => { + // let updatedRatios = [...ratios]; + // if (props.cameraRatioOverlay) { + // updatedRatios = updatedRatios.concat(props.cameraRatioOverlay.ratios || []); + // } + // setRatios(updatedRatios); + // setRatioArrayPosition(updatedRatios.length > 0 ? 0 : -1); + // }, []); + + const onSwitchCameraPressed = () => { + const direction = cameraType === CameraType.Back ? CameraType.Front : CameraType.Back; + setCameraType(direction); + }; + + const onSetFlash = () => { + const newPosition = (currentFlashArrayPosition + 1) % 3; + setCurrentFlashArrayPosition(newPosition); + setFlashData(flashArray[newPosition]); + }; + + const onSetTorch = () => { + setTorchMode(!torchMode); + }; + + const onCaptureImagePressed = async () => { + if (!cameraRef.current) return; + const image = await cameraRef.current.capture(); + if (image) { + setCaptured(true); + setCaptureImages([...captureImages, image]); + console.log('image', image); } - return ( - this.onBottomButtonPressed(event)} - flashImages={{ - on: require('../images/flashOn.png'), - off: require('../images/flashOff.png'), - auto: require('../images/flashAuto.png'), - }} - scanBarcode - showFrame - laserColor="red" - frameColor="white" - onReadCode={(event) => { - this.setState({ example: CheckingScreen, value: event.nativeEvent.codeStringValue }); - }} - hideControls - /> - ); - } -} + }; + + // const onRatioButtonPressed = () => { + // const newPosition = (ratioArrayPosition + 1) % ratios.length; + // setRatioArrayPosition(newPosition); + // }; + + const window = useWindowDimensions(); + const cameraRatio = 4 / 3; + + return ( + + + + {flashData.image && ( + onSetFlash()}> + + + )} + onSwitchCameraPressed()}> + + + onSetTorch()}> + + + + + + { + console.log('orientationChange', e.nativeEvent); + }} + // ratioOverlay={ratios[ratioArrayPosition]} + laserColor="red" + frameColor="white" + scanBarcode + showFrame + onReadCode={(event) => { + Vibration.vibrate(100); + setBarcode(event.nativeEvent.codeStringValue); + console.log('barcode', event.nativeEvent.codeStringValue); + }} + /> + + {/* {ratios.length > 0 && ( + + + Your images look best at a {ratios[0] || ''} ratio + onRatioButtonPressed()} + > + {ratios[ratioArrayPosition]} + + + + )} */} + + + { + onBack(); + }} + > + Back + + + onCaptureImagePressed()}> + + + + + + {barcode} + + + + + + ); +}; + +export default BarcodeExample; + +const styles = StyleSheet.create({ + top: { + zIndex: 10, + }, + topButtons: { + flexDirection: 'row', + justifyContent: 'center', + paddingVertical: 10, + + // borderColor: 'yellow', + // position: 'relative', + }, + flashMode: { + position: 'absolute', + left: 20, + top: 10, + bottom: 0, + }, + switchCamera: {}, + torch: { + position: 'absolute', + right: 20, + top: 10, + bottom: 0, + }, + cameraContainer: { + ...Platform.select({ + android: { + position: 'absolute', + top: 0, + left: 0, + width, + height, + }, + 
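Aside on the barcode handling in this example: the `useEffect` keyed on `barcode` clears the scanned value two seconds after every read, while the native side separately throttles how often `onReadCode` fires at all (see the "Throttle barcode detection" logic in CameraView further down, and the `scanThrottleDelay` prop mentioned in patch 10). A minimal sketch of that native-side throttle follows; the class name and the configurable interval are illustrative, not the library's actual implementation:

```swift
import Foundation

// Illustrative throttle for barcode callbacks: forward at most one value
// per interval, mirroring the "Throttle barcode detection" idea that
// CameraView implements with Date.timeIntervalSinceReferenceDate.
final class BarcodeThrottle {
    private var lastEmission = Date.distantPast
    private let minimumInterval: TimeInterval // seconds between emissions

    init(minimumInterval: TimeInterval = 2.0) {
        self.minimumInterval = minimumInterval
    }

    // Returns true when the new barcode should be forwarded to JS.
    func shouldEmit() -> Bool {
        let now = Date()
        guard now.timeIntervalSince(lastEmission) >= minimumInterval else { return false }
        lastEmission = now
        return true
    }
}
```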
default: { + justifyContent: 'center', + flex: 1, + }, + }), + }, + + bottomButtons: { + bottom: 0, + left: 0, + right: 0, + }, + bottomButtonsInner: { + paddingVertical: 10, + }, + backBtn: { + position: 'absolute', + left: 20, + top: 0, + bottom: 0, + justifyContent: 'center', + zIndex: 10, + }, + captureButtonContainer: { + justifyContent: 'center', + alignItems: 'center', + zIndex: 9, + }, + rightBottomArea: { + position: 'absolute', + right: 20, + top: 0, + bottom: 0, + zIndex: 10, + }, + textStyle: { + color: 'white', + fontSize: 20, + }, + // ratioBestText: { + // color: 'white', + // fontSize: 18, + // }, + // ratioText: { + // color: '#ffc233', + // fontSize: 18, + // }, + textNumberContainer: { + position: 'absolute', + top: 0, + left: 0, + bottom: 0, + right: 0, + justifyContent: 'center', + alignItems: 'center', + }, + gap: { + flex: 10, + flexDirection: 'column', + }, +}); diff --git a/example/src/CameraExample.tsx b/example/src/CameraExample.tsx index 290b352c69..9bc0af6bcf 100644 --- a/example/src/CameraExample.tsx +++ b/example/src/CameraExample.tsx @@ -1,42 +1,269 @@ -import React, { Component } from 'react'; -import { View, StyleSheet } from 'react-native'; +import React, { useState, useRef } from 'react'; +import { + StyleSheet, + Text, + View, + TouchableOpacity, + Image, + Dimensions, + Platform, + SafeAreaView, + useWindowDimensions, +} from 'react-native'; import Camera from '../../src/Camera'; -import { CameraType } from '../../src/types'; +import { CameraApi, CameraType, CaptureData } from '../../src/types'; -export default class CameraExample extends Component { - render() { - return ( +const { width, height } = Dimensions.get('window'); + +const flashImages = { + on: require('../images/flashOn.png'), + off: require('../images/flashOff.png'), + auto: require('../images/flashAuto.png'), +}; + +const flashArray = [ + { + mode: 'auto', + image: flashImages.auto, + }, + { + mode: 'on', + image: flashImages.on, + }, + { + mode: 'off', + image: flashImages.off, + }, +] as const; + +const CameraExample = ({ onBack }: { onBack: () => void }) => { + const cameraRef = useRef(null); + const [currentFlashArrayPosition, setCurrentFlashArrayPosition] = useState(0); + const [captureImages, setCaptureImages] = useState([]); + const [flashData, setFlashData] = useState(flashArray[currentFlashArrayPosition]); + const [torchMode, setTorchMode] = useState(false); + const [captured, setCaptured] = useState(false); + const [cameraType, setCameraType] = useState(CameraType.Back); + const [showImageUri, setShowImageUri] = useState(''); + + const numberOfImagesTaken = () => { + const numberTook = captureImages.length; + if (numberTook >= 2) { + return numberTook; + } else if (captured) { + return '1'; + } else { + return ''; + } + }; + + const onSwitchCameraPressed = () => { + const direction = cameraType === CameraType.Back ? 
CameraType.Front : CameraType.Back; + setCameraType(direction); + }; + + const onSetFlash = () => { + const newPosition = (currentFlashArrayPosition + 1) % 3; + setCurrentFlashArrayPosition(newPosition); + setFlashData(flashArray[newPosition]); + }; + + const onSetTorch = () => { + setTorchMode(!torchMode); + }; + + const onCaptureImagePressed = async () => { + if (!cameraRef.current) return; + const image = await cameraRef.current.capture(); + if (image) { + setCaptured(true); + setCaptureImages([...captureImages, image]); + console.log('image', image); + } + }; + + const window = useWindowDimensions(); + const cameraRatio = 4 / 3; + + return ( + + + + {flashData.image && ( + onSetFlash()}> + + + )} + onSwitchCameraPressed()}> + + + onSetTorch()}> + + + + - console.log(event.nativeEvent.codeStringValue)} - /> + {showImageUri ? ( + + ) : ( + { + console.log('orientationChange', e.nativeEvent) + }} + /> + )} - ); - } -} + + + { + if (showImageUri) { + setShowImageUri(''); + } else { + onBack(); + } + }} + > + Back + + + onCaptureImagePressed()}> + + + {numberOfImagesTaken()} + + + + + {captureImages.length > 0 && ( + { + if (showImageUri) { + setShowImageUri(''); + } else { + setShowImageUri(captureImages[captureImages.length - 1].uri); + } + }} + > + + + )} + + + + + ); +}; -const styles = StyleSheet.create( - { - cameraContainer: { - flex: 1, - backgroundColor: 'black', - }, +export default CameraExample; + +const styles = StyleSheet.create({ + top: { + zIndex: 10, + }, + topButtons: { + flexDirection: 'row', + justifyContent: 'center', + paddingVertical: 10, + + // borderColor: 'yellow', + // position: 'relative', + }, + flashMode: { + position: 'absolute', + left: 20, + top: 10, + bottom: 0, + }, + switchCamera: {}, + torch: { + position: 'absolute', + right: 20, + top: 10, + bottom: 0, + }, + cameraContainer: { + ...Platform.select({ + android: { + position: 'absolute', + top: 0, + left: 0, + width, + height, + }, + default: { + justifyContent: 'center', + flex: 1, + // zIndex: 0 + }, + }), + }, + + bottomButtons: { + bottom: 0, + left: 0, + right: 0, + }, + bottomButtonsInner: { + paddingVertical: 10, + }, + backBtn: { + position: 'absolute', + left: 20, + top: 0, + bottom: 0, + justifyContent: 'center', + zIndex: 10, + }, + captureButtonContainer: { + justifyContent: 'center', + alignItems: 'center', + zIndex: 9, + }, + rightBottomArea: { + position: 'absolute', + right: 20, + top: 0, + bottom: 0, + zIndex: 10, + alignItems: 'center', + justifyContent: 'center', + }, + textStyle: { + color: 'white', + fontSize: 20, + }, + textNumberContainer: { + position: 'absolute', + top: 0, + left: 0, + bottom: 0, + right: 0, + justifyContent: 'center', + alignItems: 'center', + }, + gap: { + flex: 10, + flexDirection: 'column', + }, + preview: { + width: 48, + height: 48, + borderRadius: 4, }, -); +}); diff --git a/example/src/CheckingScreen.tsx b/example/src/CheckingScreen.tsx deleted file mode 100644 index a6e78bca1f..0000000000 --- a/example/src/CheckingScreen.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import React, { Component } from 'react'; -import { View, TouchableOpacity, Text, StyleSheet } from 'react-native'; -import BarcodeScreen from './BarcodeScreenExample'; - -export default class CheckingScreen extends Component { - constructor(props) { - super(props); - this.state = { - example: undefined, - }; - } - - render() { - if (this.state.example) { - const CheckingScreen = this.state.example; - const value = this.state.value; - return ; - } - return ( - - {this.props.value} - this.setState({ 
example: BarcodeScreen })}> - Back button - - - ); - } -} - -const styles = StyleSheet.create({ - container: { - flex: 1, - paddingTop: 60, - alignItems: 'center', - backgroundColor: '#F5FCFF', - }, - valueText: { - marginBottom: 20, - fontSize: 40, - }, - buttonText: { - color: 'blue', - marginBottom: 20, - fontSize: 20, - }, -}); diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index 8a199a09a1..be1a2ee76c 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -11,10 +11,11 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) func cameraRemovedFromSuperview() - func update(zoomVelocity: CGFloat) + func update(zoomScale: CGFloat) func update(torchMode: TorchMode) func update(flashMode: FlashMode) func update(cameraType: CameraType) + func update(onOrientationChange: RCTDirectEventBlock?) func isBarcodeScannerEnabled(_ isEnabled: Bool, supportedBarcodeType: [AVMetadataObject.ObjectType], diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 71e9e9ded6..6ed20b3ac0 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -101,28 +101,15 @@ class CameraView: UIView { addSubview(focusInterfaceView) focusInterfaceView.delegate = camera - // Listen to orientation changes - UIDevice.current.beginGeneratingDeviceOrientationNotifications() - NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, - object: UIDevice.current, - queue: nil, - using: { [weak self] notification in self?.orientationChanged(notification: notification) }) - handleCameraPermission() } override func removeFromSuperview() { camera.cameraRemovedFromSuperview() - NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) - super.removeFromSuperview() } - deinit { - UIDevice.current.endGeneratingDeviceOrientationNotifications() - } - // MARK: React lifecycle override func reactSetFrame(_ frame: CGRect) { @@ -158,6 +145,10 @@ class CameraView: UIView { if changedProps.contains("cameraType") || changedProps.contains("torchMode") { camera.update(torchMode: torchMode) } + + if changedProps.contains("onOrientationChange") { + camera.update(onOrientationChange: onOrientationChange) + } // Ratio overlay if changedProps.contains("ratioOverlay") { @@ -298,16 +289,6 @@ class CameraView: UIView { return temporaryFileURL } - private func orientationChanged(notification: Notification) { - guard let onOrientationChange, - let device = notification.object as? 
UIDevice, - let orientation = Orientation(from: device.orientation) else { - return - } - - onOrientationChange(["orientation": orientation.rawValue]) - } - private func onBarcodeRead(barcode: String) { // Throttle barcode detection let now = Date.timeIntervalSinceReferenceDate @@ -323,8 +304,10 @@ class CameraView: UIView { // MARK: - Gesture selectors @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { + var zoomScale = pinchRecognizer.scale if pinchRecognizer.state == .changed { - camera.update(zoomVelocity: pinchRecognizer.velocity) + camera.update(zoomScale: zoomScale) + pinchRecognizer.scale = 1.0 } } } diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index a4857c3fc6..b23958d5e0 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -5,6 +5,7 @@ import AVFoundation import UIKit +import CoreMotion /* * Real camera implementation that uses AVFoundation @@ -33,6 +34,10 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var focusFinished: (() -> Void)? private var onBarcodeRead: ((_ barcode: String) -> Void)? private var scannerFrameSize: CGRect? = nil + private var onOrientationChange: RCTDirectEventBlock? + + private var deviceOrientation = UIDeviceOrientation.portrait + private var motionManager: CMMotionManager? // KVO observation private var adjustingFocusObservation: NSKeyValueObservation? @@ -42,7 +47,16 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // MARK: - Lifecycle override init() { - // No-op + super.init() + + // In addition to using accelerometer to determine REAL orientation + // we also listen to UI orientation changes (UIDevice does not report rotation if orientation lock is on, so photos aren't rotated correctly) + // When UIDevice reports rotation to the left, UI is rotated right to compensate, but that means we need to re-rotate left to make camera appear correctly (see self.uiOrientationChanged) + UIDevice.current.beginGeneratingDeviceOrientationNotifications() + NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, + object: UIDevice.current, + queue: nil, + using: { [weak self] notification in self?.uiOrientationChanged(notification: notification) }) } @available(*, unavailable) @@ -57,6 +71,12 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.removeObservers() } } + + motionManager?.stopAccelerometerUpdates() + + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) + + UIDevice.current.endGeneratingDeviceOrientationNotifications() } deinit { @@ -64,7 +84,42 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } // MARK: - Public - + + func initializeMotionManager() { + motionManager = CMMotionManager() + motionManager?.accelerometerUpdateInterval = 0.2 + motionManager?.gyroUpdateInterval = 0.2 + motionManager?.startAccelerometerUpdates(to: (OperationQueue.current)!, withHandler: { + (accelerometerData, error) -> Void in + if error != nil { + print("\(error!)") + } + guard let acceleration = accelerometerData?.acceleration else { + print("no acceleration data") + return + } + var orientationNew: UIDeviceOrientation + if acceleration.x >= 0.75 { + orientationNew = .landscapeLeft + } else if acceleration.x <= -0.75 { + orientationNew = .landscapeRight + } else if acceleration.y <= 
-0.75 { + orientationNew = .portrait + } else if acceleration.y >= 0.75 { + orientationNew = .portraitUpsideDown + } else { + // Consider same as last time + return + } + + if orientationNew == self.deviceOrientation { + return + } + self.deviceOrientation = orientationNew + self.onOrientationChange?(["orientation": orientationNew.rawValue]) + }) + } + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { self.cameraType = cameraType @@ -72,6 +127,8 @@ self.cameraPreview.session = self.session self.cameraPreview.previewLayer.videoGravity = .resizeAspectFill } + + self.initializeMotionManager() // Setup the capture session. // In general, it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. @@ -92,12 +149,22 @@ } } - func update(zoomVelocity: CGFloat) { - guard !zoomVelocity.isNaN else { return } - + func update(zoomScale: CGFloat) { + guard !zoomScale.isNaN else { return } + sessionQueue.async { - let pinchVelocityDividerFactor: CGFloat = 20.0 - self.videoDeviceInput?.device.incrementZoomFactor(atan(zoomVelocity / pinchVelocityDividerFactor)) + guard let device = self.videoDeviceInput?.device else { return } + let zoom = device.videoZoomFactor * zoomScale + do{ + try device.lockForConfiguration() + defer {device.unlockForConfiguration()} + if zoom >= device.minAvailableVideoZoomFactor && zoom <= device.maxAvailableVideoZoomFactor { + device.videoZoomFactor = zoom + } else { + NSLog("Unable to set videoZoom: (max %f, asked %f)", device.activeFormat.videoMaxZoomFactor, zoom); + } + }catch _{ + } } } @@ -121,7 +188,11 @@ } } } - + + func update(onOrientationChange: RCTDirectEventBlock?) { + self.onOrientationChange = onOrientationChange + } + func update(torchMode: TorchMode) { self.torchMode = torchMode @@ -147,7 +218,7 @@ // Avoid changing device inputs when camera input is denied by the user, since both front and rear video input devices will be nil guard self.setupResult == .success, let currentViewDeviceInput = self.videoDeviceInput, - let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition), + let videoDevice = self.getBestDevice(), let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else { return } @@ -183,7 +254,26 @@ the main thread and session configuration is done on the session queue. 
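The zoom math in the `update(zoomScale:)` hunk above is easy to miss: CameraView resets `pinchRecognizer.scale` to 1.0 after every `.changed` event, so each callback delivers only the scale delta since the last event, and multiplying it into `videoZoomFactor` composes the gesture incrementally (this is what fixes zoom on the second pinch). The hunk rejects out-of-range values; clamping, which patch 09 switches to, is the other common choice and is sketched below under the assumption that the caller passes the per-event scale:

```swift
import AVFoundation

// Condensed incremental pinch-to-zoom: `scale` is the delta since the
// last gesture event, so current factor * scale is the new target.
func applyPinch(scale: CGFloat, to device: AVCaptureDevice) {
    let target = device.videoZoomFactor * scale
    // Clamp instead of rejecting, so the gesture never "sticks".
    let clamped = max(device.minAvailableVideoZoomFactor,
                      min(target, device.maxAvailableVideoZoomFactor))
    do {
        try device.lockForConfiguration()
        defer { device.unlockForConfiguration() }
        device.videoZoomFactor = clamped
    } catch {
        print("lockForConfiguration failed: \(error)")
    }
}
```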
*/ DispatchQueue.main.async { - let videoPreviewLayerOrientation = self.cameraPreview.previewLayer.connection?.videoOrientation + var videoPreviewLayerOrientation = self.cameraPreview.previewLayer.connection?.videoOrientation + + switch(self.deviceOrientation) { + case .portrait: + videoPreviewLayerOrientation = .portrait + break + case .portraitUpsideDown: + videoPreviewLayerOrientation = .portraitUpsideDown + break + case .landscapeLeft: + videoPreviewLayerOrientation = .landscapeLeft + break + case .landscapeRight: + videoPreviewLayerOrientation = .landscapeRight + break + case .unknown: break + case .faceUp: break + case .faceDown: break + @unknown default: break + } self.sessionQueue.async { if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { @@ -277,9 +367,52 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } // MARK: - Private + + private func uiOrientationChanged(notification: Notification) { + guard let device = notification.object as? UIDevice else { + return + } + + // Counter-rotate video when in landscapeLeft/Right UI so it appears level + // (note how landscapeLeft sets landscapeRight) + switch(device.orientation) { + case .unknown: break + case .portrait: + self.cameraPreview.previewLayer.connection?.videoOrientation = .portrait + print("ui portrait") + case .portraitUpsideDown: + self.cameraPreview.previewLayer.connection?.videoOrientation = .portraitUpsideDown + print("ui upside down") + case .landscapeLeft: + self.cameraPreview.previewLayer.connection?.videoOrientation = .landscapeRight + print("ui landscapeLeft") + case .landscapeRight: + self.cameraPreview.previewLayer.connection?.videoOrientation = .landscapeLeft + print("ui landscapeRight") + case .faceUp: break + case .faceDown: break + @unknown default: break + } + } + + private func getBestDevice() -> AVCaptureDevice? { + // AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition), + if #available(iOS 13.0, *) { + if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: cameraType.avPosition) { + return device + } + } + if let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: cameraType.avPosition) { + return device + } + if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition) { + return device + } + return nil + } private func setupCaptureSession(supportedBarcodeType: [AVMetadataObject.ObjectType]) -> SetupResult { - guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition), + guard let videoDevice = self.getBestDevice(), let videoDeviceInput = try? 
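`getBestDevice()` above prefers the virtual multi-lens devices because AVFoundation can then switch lenses on its own, which is what fixes the blurry close-ups called out in the commit message (the ultra-wide takes over at short focus distances). The same fallback chain can also be expressed with a single `AVCaptureDevice.DiscoverySession`, which returns matches in the order of the `deviceTypes` array; this is a sketch of that alternative, not the patch's code:

```swift
import AVFoundation

// Triple camera, then dual, then plain wide angle: DiscoverySession
// returns devices in the order of the deviceTypes list, so the first
// result is the most capable camera available on this hardware.
func bestDevice(for position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    var types: [AVCaptureDevice.DeviceType] = []
    if #available(iOS 13.0, *) {
        types.append(.builtInTripleCamera)
    }
    types.append(contentsOf: [.builtInDualCamera, .builtInWideAngleCamera])
    return AVCaptureDevice.DiscoverySession(deviceTypes: types,
                                            mediaType: .video,
                                            position: position).devices.first
}
```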
AVCaptureDeviceInput(device: videoDevice) else { return .sessionConfigurationFailed } @@ -312,29 +445,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega session.commitConfiguration() - self.refreshPreviewVideoOrientation() - return .success } - private func refreshPreviewVideoOrientation() { - DispatchQueue.main.async { - guard let orientation = Orientation(from: UIApplication.shared.statusBarOrientation)?.avVideoOrientation else { return } - - self.cameraPreview.previewLayer.connection?.videoOrientation = orientation - } - } - // MARK: Private observers private func addObservers() { guard adjustingFocusObservation == nil else { return } - NotificationCenter.default.addObserver(forName: UIApplication.didChangeStatusBarOrientationNotification, - object: nil, - queue: nil, - using: { [weak self] _ in self?.refreshPreviewVideoOrientation() }) - adjustingFocusObservation = videoDeviceInput?.device.observe(\.isAdjustingFocus, options: .new, changeHandler: { [weak self] device, change in diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index c4586b28ab..7e62d091de 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -10,6 +10,8 @@ import UIKit * Fake camera implementation to be used on simulator */ class SimulatorCamera: CameraProtocol { + private var onOrientationChange: RCTDirectEventBlock? + var previewView: UIView { mockPreview } private var fakeFocusFinishedTimer: Timer? @@ -23,12 +25,38 @@ class SimulatorCamera: CameraProtocol { DispatchQueue.main.async { self.mockPreview.cameraTypeLabel.text = "Camera type: \(cameraType)" } + + // Listen to orientation changes + UIDevice.current.beginGeneratingDeviceOrientationNotifications() + NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, + object: UIDevice.current, + queue: nil, + using: { [weak self] notification in self?.orientationChanged(notification: notification) }) + + } - func cameraRemovedFromSuperview() {} + + private func orientationChanged(notification: Notification) { + guard let device = notification.object as? UIDevice, + let orientation = Orientation(from: device.orientation) else { + return + } - func update(zoomVelocity: CGFloat) { + self.onOrientationChange?(["orientation": orientation.rawValue]) + } + + func cameraRemovedFromSuperview() { + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) + + } + + func update(onOrientationChange: RCTDirectEventBlock?) 
{ + self.onOrientationChange = onOrientationChange + } + + func update(zoomScale: CGFloat) { DispatchQueue.main.async { - self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(zoomVelocity)" + self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(zoomScale)" } } From fee756b55c6fad51772f880c71725acbf971e3b3 Mon Sep 17 00:00:00 2001 From: Seph Soliman Date: Mon, 3 Jul 2023 20:21:20 -0700 Subject: [PATCH 09/20] Fixed orientation issues for both orientation locked and rotated UIs Added support for thumbnails (for later) Fixed default zoom to be wide angle camera instead of ultra wide --- .prettierrc.js | 2 +- example/src/App.tsx | 22 +--- example/src/BarcodeScreenExample.tsx | 10 +- example/src/CameraExample.tsx | 59 ++++++---- ios/ReactNativeCameraKit/CameraProtocol.swift | 4 +- ios/ReactNativeCameraKit/CameraView.swift | 17 ++- .../PhotoCaptureDelegate.swift | 6 +- ios/ReactNativeCameraKit/RealCamera.swift | 106 +++++++++++------- .../SimulatorCamera.swift | 8 +- 9 files changed, 134 insertions(+), 100 deletions(-) diff --git a/.prettierrc.js b/.prettierrc.js index af6b94eeb4..3e4729ab77 100644 --- a/.prettierrc.js +++ b/.prettierrc.js @@ -7,7 +7,7 @@ module.exports = { printWidth: 120, semi: true, jsxBracketSameLine: false, - jsxSingleQuote: true, + jsxSingleQuote: false, arrowParens: 'always', }; \ No newline at end of file diff --git a/example/src/App.tsx b/example/src/App.tsx index 598b3927ae..a5fad5fceb 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -1,18 +1,12 @@ import React, { Component } from 'react'; -import { - StyleSheet, - Text, - View, - TouchableOpacity, - ScrollView, -} from 'react-native'; +import { StyleSheet, Text, View, TouchableOpacity, ScrollView } from 'react-native'; import BarcodeScreenExample from './BarcodeScreenExample'; import CameraExample from './CameraExample'; type State = { example?: any; -} +}; export default class App extends Component { state: State; @@ -33,20 +27,14 @@ export default class App extends Component { 🎈 - - React Native Camera Kit - + React Native Camera Kit this.setState({ example: CameraExample })}> - - Camera - + Camera this.setState({ example: BarcodeScreenExample })}> - - Barcode Scanner - + Barcode Scanner diff --git a/example/src/BarcodeScreenExample.tsx b/example/src/BarcodeScreenExample.tsx index 800c3e242d..ed0f928864 100644 --- a/example/src/BarcodeScreenExample.tsx +++ b/example/src/BarcodeScreenExample.tsx @@ -106,16 +106,16 @@ const BarcodeExample = ({ onBack }: { onBack: () => void }) => { {flashData.image && ( onSetFlash()}> - + )} onSwitchCameraPressed()}> - + onSetTorch()}> @@ -126,8 +126,8 @@ const BarcodeExample = ({ onBack }: { onBack: () => void }) => { style={{ width: window.width, height: window.width * cameraRatio }} cameraType={cameraType} flashMode={flashData?.mode} - zoomMode='on' - focusMode='on' + zoomMode="on" + focusMode="on" torchMode={torchMode ? 
'on' : 'off'} onOrientationChange={(e) => { console.log('orientationChange', e.nativeEvent); diff --git a/example/src/CameraExample.tsx b/example/src/CameraExample.tsx index 9bc0af6bcf..631806ebd5 100644 --- a/example/src/CameraExample.tsx +++ b/example/src/CameraExample.tsx @@ -12,6 +12,7 @@ import { } from 'react-native'; import Camera from '../../src/Camera'; import { CameraApi, CameraType, CaptureData } from '../../src/types'; +import { Orientation } from '../../src'; const { width, height } = Dimensions.get('window'); @@ -46,6 +47,11 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { const [cameraType, setCameraType] = useState(CameraType.Back); const [showImageUri, setShowImageUri] = useState(''); + // iOS will error out if capturing too fast, + // so block capturing until the current capture is done + // This also minimizes issues of delayed capturing + const isCapturing = useRef(false); + const numberOfImagesTaken = () => { const numberTook = captureImages.length; if (numberTook >= 2) { @@ -73,13 +79,25 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { }; const onCaptureImagePressed = async () => { - if (!cameraRef.current) return; - const image = await cameraRef.current.capture(); - if (image) { - setCaptured(true); - setCaptureImages([...captureImages, image]); - console.log('image', image); + if (showImageUri) { + setShowImageUri(''); + return; + } + if (!cameraRef.current || isCapturing.current) return; + let image: CaptureData | undefined; + try { + isCapturing.current = true; + image = await cameraRef.current.capture(); + } catch (e) { + console.log('error', e); + } finally { + isCapturing.current = false; } + if (!image) return; + + setCaptured(true); + setCaptureImages([...captureImages, image]); + console.log('image', image); }; const window = useWindowDimensions(); @@ -91,16 +109,16 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { {flashData.image && ( onSetFlash()}> - + )} onSwitchCameraPressed()}> - + onSetTorch()}> @@ -110,35 +128,32 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { ) : ( { - console.log('orientationChange', e.nativeEvent) + // We recommend locking the camera UI to portrait (using a different library) + // and rotating the UI elements counter to the orientation + // However, we include onOrientationChange so you can match your UI to what the camera does + const isLandscape = [Orientation.LANDSCAPE_LEFT, Orientation.LANDSCAPE_RIGHT].includes( + e.nativeEvent.orientation, + ); + console.log('orientationChange', isLandscape ? 'landscape' : 'portrait'); }} /> )} - { - if (showImageUri) { - setShowImageUri(''); - } else { - onBack(); - } - }} - > + onBack()}> Back diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index be1a2ee76c..557712f523 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -11,7 +11,7 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) func cameraRemovedFromSuperview() - func update(zoomScale: CGFloat) + func update(pinchVelocity: CGFloat, pinchScale: CGFloat) func update(torchMode: TorchMode) func update(flashMode: FlashMode) func update(cameraType: CameraType) @@ -23,6 +23,6 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func update(scannerFrameSize: CGRect?) 
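The comment introduced in CameraExample above ("iOS will error out if capturing too fast") is enforced on the JS side with an `isCapturing` ref. The same guard could equally live on the native side of a `CameraProtocol` implementation; here is a sketch of the idea, with `CaptureGate` and its method names being illustrative rather than anything this patch adds:

```swift
// Illustrative main-thread guard: drop capture requests while one is
// in flight instead of stacking them up against AVFoundation.
final class CaptureGate {
    private(set) var isCapturing = false

    // `start` must invoke the completion it receives exactly once.
    func capture(start: (_ done: @escaping () -> Void) -> Void) {
        guard !isCapturing else { return }
        isCapturing = true
        start { [weak self] in self?.isCapturing = false }
    }
}

// Usage against the protocol above:
// gate.capture { done in
//     camera.capturePicture(onWillCapture: {},
//                           onSuccess: { _, _ in done() },
//                           onError: { _ in done() })
// }
```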
func capturePicture(onWillCapture: @escaping () -> Void, - onSuccess: @escaping (_ imageData: Data) -> (), + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> (), onError: @escaping (_ message: String) -> ()) } diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 6ed20b3ac0..965060ea7f 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -232,9 +232,9 @@ class CameraView: UIView { self?.camera.previewView.alpha = 1 }) } - }, onSuccess: { [weak self] imageData in + }, onSuccess: { [weak self] imageData, thumbnailData in DispatchQueue.global(qos: .default).async { - self?.writeCaptured(imageData: imageData, onSuccess: onSuccess, onError: onError) + self?.writeCaptured(imageData: imageData, thumbnailData: thumbnailData, onSuccess: onSuccess, onError: onError) self?.focusInterfaceView.resetFocus() } @@ -262,15 +262,21 @@ class CameraView: UIView { } } - private func writeCaptured(imageData: Data, + private func writeCaptured(imageData: Data, + thumbnailData: Data?, onSuccess: @escaping (_ imageObject: [String: Any]) -> (), onError: @escaping (_ error: String) -> ()) { do { let temporaryFileURL = try saveToTmpFolder(imageData) + var temporaryThumbFileURL: URL? = nil + if let t = thumbnailData { + temporaryThumbFileURL = try saveToTmpFolder(t) + } onSuccess([ "size": imageData.count, "uri": temporaryFileURL.description, - "name": temporaryFileURL.lastPathComponent + "name": temporaryFileURL.lastPathComponent, + "thumb": temporaryThumbFileURL?.description ?? "" ]) } catch { let errorMessage = "Error occurred while writing image data to a temporary file: \(error)" @@ -304,9 +310,8 @@ class CameraView: UIView { // MARK: - Gesture selectors @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { - var zoomScale = pinchRecognizer.scale if pinchRecognizer.state == .changed { - camera.update(zoomScale: zoomScale) + camera.update(pinchVelocity: pinchRecognizer.velocity, pinchScale: pinchRecognizer.scale) pinchRecognizer.scale = 1.0 } } diff --git a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift index fe1e62b81f..346fb0c5fe 100644 --- a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift +++ b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift @@ -12,12 +12,12 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate { private(set) var requestedPhotoSettings: AVCapturePhotoSettings private let onWillCapture: () -> Void - private let onCaptureSuccess: (_ uniqueID: Int64, _ imageData: Data) -> Void + private let onCaptureSuccess: (_ uniqueID: Int64, _ imageData: Data, _ photo: AVCapturePhoto) -> Void private let onCaptureError: (_ uniqueID: Int64, _ message: String) -> Void init(with requestedPhotoSettings: AVCapturePhotoSettings, onWillCapture: @escaping () -> Void, - onCaptureSuccess: @escaping (_ uniqueID: Int64, _ imageData: Data) -> Void, + onCaptureSuccess: @escaping (_ uniqueID: Int64, _ imageData: Data, _ photo: AVCapturePhoto) -> Void, onCaptureError: @escaping (_ uniqueID: Int64, _ errorMessage: String) -> Void) { self.requestedPhotoSettings = requestedPhotoSettings self.onWillCapture = onWillCapture @@ -43,6 +43,6 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate { return } - onCaptureSuccess(requestedPhotoSettings.uniqueID, imageData) + onCaptureSuccess(requestedPhotoSettings.uniqueID, imageData, photo) } } diff --git 
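PhotoCaptureDelegate above is created once per capture, and RealCamera keeps it in `inProgressPhotoCaptureDelegates` until a completion path runs; as the comment added in patch 09 puts it, the delegate objects must be kept in memory so they are not collected before photo capturing finishes. The bookkeeping in isolation, with the class and method names here being illustrative:

```swift
import AVFoundation

// Per-capture delegate bookkeeping: retain each delegate keyed by its
// settings' uniqueID, and release it from the completion callbacks.
final class CaptureBookkeeping {
    private var inProgress = [Int64: AVCapturePhotoCaptureDelegate]()

    func start(output: AVCapturePhotoOutput,
               settings: AVCapturePhotoSettings,
               delegate: AVCapturePhotoCaptureDelegate) {
        inProgress[settings.uniqueID] = delegate // strong reference until done
        output.capturePhoto(with: settings, delegate: delegate)
    }

    // Call from both the success and the error path of the delegate.
    func finish(uniqueID: Int64) {
        inProgress[uniqueID] = nil // lets the delegate deallocate
    }
}
```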
a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index b23958d5e0..17569ca705 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -36,12 +36,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var scannerFrameSize: CGRect? = nil private var onOrientationChange: RCTDirectEventBlock? - private var deviceOrientation = UIDeviceOrientation.portrait + private var deviceOrientation = UIInterfaceOrientation.unknown private var motionManager: CMMotionManager? // KVO observation private var adjustingFocusObservation: NSKeyValueObservation? + // Keep delegate objects in memory to avoid collecting them before photo capturing finishes private var inProgressPhotoCaptureDelegates = [Int64: PhotoCaptureDelegate]() // MARK: - Lifecycle @@ -98,7 +99,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega print("no acceleration data") return } - var orientationNew: UIDeviceOrientation + var orientationNew: UIInterfaceOrientation if acceleration.x >= 0.75 { orientationNew = .landscapeLeft } else if acceleration.x <= -0.75 { @@ -108,7 +109,8 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } else if acceleration.y >= 0.75 { orientationNew = .portraitUpsideDown } else { - // Consider same as last time + // Device is not clearly pointing in either direction + // (e.g. it's flat on the table, so stick with the same orientation) return } @@ -116,7 +118,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega return } self.deviceOrientation = orientationNew - self.onOrientationChange?(["orientation": orientationNew.rawValue]) + self.onOrientationChange?(["orientation": Orientation.init(from: orientationNew)!.rawValue]) }) } @@ -125,7 +127,15 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega DispatchQueue.main.async { self.cameraPreview.session = self.session - self.cameraPreview.previewLayer.videoGravity = .resizeAspectFill + self.cameraPreview.previewLayer.videoGravity = .resizeAspect + var interfaceOrientation: UIInterfaceOrientation + if #available(iOS 13.0, *) { + interfaceOrientation = self.previewView.window!.windowScene!.interfaceOrientation + } else { + interfaceOrientation = UIApplication.shared.statusBarOrientation + } + var orientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: interfaceOrientation) + self.cameraPreview.previewLayer.connection?.videoOrientation = orientation! 
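Two orientation enums are in play in this commit, and they disagree: `UIDeviceOrientation` describes the hardware, while `UIInterfaceOrientation` describes the UI, which rotates counter to the device, so their landscape cases are mirrored; `AVCaptureVideoOrientation` follows the interface convention. That is why patch 08's `uiOrientationChanged` swapped landscapeLeft and landscapeRight, while the interface-orientation mapping introduced here is one-to-one. In sketch form:

```swift
import AVFoundation
import UIKit

// Device orientation to capture orientation needs the landscape swap;
// interface orientation to capture orientation does not.
func captureOrientation(forDevice orientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? {
    switch orientation {
    case .portrait: return .portrait
    case .portraitUpsideDown: return .portraitUpsideDown
    case .landscapeLeft: return .landscapeRight // note the swap
    case .landscapeRight: return .landscapeLeft
    default: return nil // faceUp/faceDown/unknown: keep the previous value
    }
}
```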
} self.initializeMotionManager() @@ -149,21 +159,20 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } - func update(zoomScale: CGFloat) { - guard !zoomScale.isNaN else { return } + func update(pinchVelocity: CGFloat, pinchScale: CGFloat) { + guard !pinchScale.isNaN else { return } sessionQueue.async { guard let device = self.videoDeviceInput?.device else { return } - let zoom = device.videoZoomFactor * zoomScale - do{ + do { try device.lockForConfiguration() defer {device.unlockForConfiguration()} - if zoom >= device.minAvailableVideoZoomFactor && zoom <= device.maxAvailableVideoZoomFactor { - device.videoZoomFactor = zoom - } else { - NSLog("Unable to set videoZoom: (max %f, asked %f)", device.activeFormat.videoMaxZoomFactor, zoom); - } - }catch _{ + + let pinchVelocityDividerFactor = CGFloat(10.0); + let desiredZoomFactor = device.videoZoomFactor + CGFloat(atan2f(Float(pinchVelocity), Float(pinchVelocityDividerFactor))); + device.videoZoomFactor = max(1.0, min(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + } catch { + NSLog("device.lockForConfiguration error: \(error))") } } } @@ -244,9 +253,25 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.update(torchMode: self.torchMode) } } - + + func counterRotatedCaptureVideoOrientationFrom(deviceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation? { + switch(deviceOrientation) { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + case .unknown: break + @unknown default: break + } + return nil + } + func capturePicture(onWillCapture: @escaping () -> Void, - onSuccess: @escaping (_ imageData: Data) -> Void, + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> Void, onError: @escaping (_ message: String) -> Void) { /* Retrieve the video preview layer's video orientation on the main queue before @@ -254,26 +279,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega the main thread and session configuration is done on the session queue. */ DispatchQueue.main.async { - var videoPreviewLayerOrientation = self.cameraPreview.previewLayer.connection?.videoOrientation - - switch(self.deviceOrientation) { - case .portrait: - videoPreviewLayerOrientation = .portrait - break - case .portraitUpsideDown: - videoPreviewLayerOrientation = .portraitUpsideDown - break - case .landscapeLeft: - videoPreviewLayerOrientation = .landscapeLeft - break - case .landscapeRight: - videoPreviewLayerOrientation = .landscapeRight - break - case .unknown: break - case .faceUp: break - case .faceDown: break - @unknown default: break - } + var videoPreviewLayerOrientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation self.sessionQueue.async { if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { @@ -290,14 +296,23 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega let photoCaptureDelegate = PhotoCaptureDelegate( with: settings, onWillCapture: onWillCapture, - onCaptureSuccess: { uniqueID, imageData in + onCaptureSuccess: { uniqueID, imageData, photo in self.inProgressPhotoCaptureDelegates[uniqueID] = nil - onSuccess(imageData) + + var thumbnailData: Data? 
= nil + if let previewPixelBuffer = photo.previewPixelBuffer { + let ciImage = CIImage(cvPixelBuffer: previewPixelBuffer) + let uiImage = UIImage(ciImage: ciImage) + thumbnailData = uiImage.jpegData(compressionQuality: 0.7) + } + + onSuccess(imageData, thumbnailData) }, onCaptureError: { uniqueID, errorMessage in self.inProgressPhotoCaptureDelegates[uniqueID] = nil onError(errorMessage) - }) + } + ) self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate) @@ -442,6 +457,17 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega let filteredTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) } metadataOutput.metadataObjectTypes = filteredTypes } + + // Find the 'normal' zoom factor, which on the native camera defaults to the wide angle + var wideAngleZoomFactor = 1.0 + if #available(iOS 13.0, *) { + if let indexOfWideAngle = videoDevice.constituentDevices.firstIndex(where: { device in device.deviceType == .builtInWideAngleCamera }) { + // .virtualDeviceSwitchOverVideoZoomFactors has the .constituentDevices zoom factor which borders the NEXT device + // so we grab the one PRIOR to the wide angle to get the wide angle's zoom factor + wideAngleZoomFactor = videoDevice.virtualDeviceSwitchOverVideoZoomFactors[indexOfWideAngle - 1].doubleValue + } + } + self.videoDeviceInput?.device.videoZoomFactor = wideAngleZoomFactor session.commitConfiguration() diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index 7e62d091de..7012782d0c 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -54,9 +54,9 @@ class SimulatorCamera: CameraProtocol { self.onOrientationChange = onOrientationChange } - func update(zoomScale: CGFloat) { + func update(pinchVelocity: CGFloat, pinchScale: CGFloat) { DispatchQueue.main.async { - self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(zoomScale)" + self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(pinchVelocity)" } } @@ -100,7 +100,7 @@ class SimulatorCamera: CameraProtocol { func update(scannerFrameSize: CGRect?) {} func capturePicture(onWillCapture: @escaping () -> Void, - onSuccess: @escaping (_ imageData: Data) -> (), + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) 
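On the triple and dual-wide virtual devices the ultra-wide is the first constituent camera, so `videoZoomFactor == 1.0` frames through the ultra-wide; the `wideAngleZoomFactor` hunk above therefore jumps to the switch-over factor at which the wide angle takes over, matching the native Camera app's default. A defensive variant of the same lookup follows; the extra guard is not in the patch, but matters because on the older wide-plus-telephoto dual camera the wide angle is constituent 0 and `indexOfWideAngle - 1` would go out of bounds:

```swift
import AVFoundation

// Default-zoom lookup: switch-over factor i is where constituent i
// hands off to constituent i + 1, so the entry before the wide angle
// is the factor at which the wide angle becomes active.
@available(iOS 13.0, *)
func defaultZoomFactor(for device: AVCaptureDevice) -> CGFloat {
    guard let wideIndex = device.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }),
          wideIndex > 0,
          device.virtualDeviceSwitchOverVideoZoomFactors.count >= wideIndex
    else { return 1.0 } // plain wide angle, or the wide angle is already lens 0
    return CGFloat(truncating: device.virtualDeviceSwitchOverVideoZoomFactors[wideIndex - 1])
}
```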
-> (), onError: @escaping (_ message: String) -> ()) { onWillCapture() @@ -111,7 +111,7 @@ class SimulatorCamera: CameraProtocol { // Then switch to background thread DispatchQueue.global(qos: .default).async { if let imageData = previewSnapshot?.jpegData(compressionQuality: 0.85) { - onSuccess(imageData) + onSuccess(imageData, nil) } else { onError("Failed to convert snapshot to JPEG data") } From 8b346669deb4d392aaaea52bad0231d57908a51c Mon Sep 17 00:00:00 2001 From: David Bertet Date: Tue, 4 Jul 2023 01:16:23 -0700 Subject: [PATCH 10/20] Small refinements Remove cameraType from the class, use local variable only to avoid unwanted side effects Move thumbnail creation into delegate, to be consistent and return only data Make `counterRotatedCaptureVideoOrientationFrom` private Refactor slightly `update(pinchVelocity` Remove `setTorchMode`, add `scanThrottleDelay` from React types --- README.md | 2 +- .../AVCaptureDevice+Setter.swift | 21 +++-- ios/ReactNativeCameraKit/CameraView.swift | 18 ++--- .../PhotoCaptureDelegate.swift | 13 ++- ios/ReactNativeCameraKit/RealCamera.swift | 81 +++++++------------ .../SimulatorCamera.swift | 2 +- src/Camera.d.ts | 1 + src/Camera.ios.tsx | 3 - src/types.ts | 1 - 9 files changed, 63 insertions(+), 79 deletions(-) diff --git a/README.md b/README.md index bf2e265a3c..726764e5b1 100644 --- a/README.md +++ b/README.md @@ -119,7 +119,7 @@ Additionally, the Camera can be used for barcode scanning | `cameraType` | CameraType.Back/CameraType.Front | Choose what camera to use. Default: `CameraType.Back` | | `onOrientationChange` | Function | Callback when physical device orientation changes. Returned event contains `orientation`. Ex: `onOrientationChange={(event) => console.log(event.nativeEvent.orientation)}`. Use `import { Orientation } from 'react-native-camera-kit'; if (event.nativeEvent.orientation === Orientation.PORTRAIT) { ... }` to understand the new value | | **iOS only** | -| `ratioOverlay` | `['int:int', ...]` | Show a guiding overlay in the camera preview for the selected ratio. Does not crop image as of v9.0. Example: `['16:9', '1:1', '3:4']` | +| `ratioOverlay` | `'int:int'` | Show a guiding overlay in the camera preview for the selected ratio. Does not crop image as of v9.0. Example: `'16:9'` | | `ratioOverlayColor` | Color | Any color with alpha. Default: `'#ffffff77'` | | `resetFocusTimeout` | Number | Dismiss tap to focus after this many milliseconds. Default `0` (disabled). Example: `5000` is 5 seconds. | | `resetFocusWhenMotionDetected` | Boolean | Dismiss tap to focus when focus area content changes. Native iOS feature, see documentation: https://developer.apple.com/documentation/avfoundation/avcapturedevice/1624644-subjectareachangemonitoringenabl?language=objc). Default `true`. 
| diff --git a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift index fb3fb6c9d6..2e8641191d 100644 --- a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift +++ b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift @@ -10,8 +10,10 @@ extension AVCaptureDevice { if isTorchModeSupported(newTorchMode) && hasTorch { do { try lockForConfiguration() + + defer { unlockForConfiguration() } + torchMode = newTorchMode - unlockForConfiguration() } catch { print("Error setting torch mode: \(error)") } @@ -22,18 +24,14 @@ extension AVCaptureDevice { do { try lockForConfiguration() - var zoomFactor = videoZoomFactor + zoomFactorIncrement - if zoomFactor > activeFormat.videoMaxZoomFactor { - zoomFactor = activeFormat.videoMaxZoomFactor - } else if zoomFactor < 1 { - zoomFactor = 1.0 - } - videoZoomFactor = zoomFactor - unlockForConfiguration() + defer { unlockForConfiguration() } + + let desiredZoomFactor = videoZoomFactor + zoomFactorIncrement + videoZoomFactor = max(1.0, min(desiredZoomFactor, activeFormat.videoMaxZoomFactor)) } catch { print("Error setting zoom factor: \(error)") } - } + } func focusWithMode(_ focusMode: AVCaptureDevice.FocusMode, exposeWithMode exposureMode: AVCaptureDevice.ExposureMode, @@ -42,6 +40,8 @@ extension AVCaptureDevice { do { try lockForConfiguration() + defer { unlockForConfiguration() } + if isFocusPointOfInterestSupported && isFocusModeSupported(focusMode) { focusPointOfInterest = point self.focusMode = focusMode @@ -53,7 +53,6 @@ extension AVCaptureDevice { } self.isSubjectAreaChangeMonitoringEnabled = isSubjectAreaChangeMonitoringEnabled - unlockForConfiguration() } catch { print("Error setting focus: \(error)") } diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 965060ea7f..f10cae080c 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -267,16 +267,14 @@ class CameraView: UIView { onSuccess: @escaping (_ imageObject: [String: Any]) -> (), onError: @escaping (_ error: String) -> ()) { do { - let temporaryFileURL = try saveToTmpFolder(imageData) - var temporaryThumbFileURL: URL? = nil - if let t = thumbnailData { - temporaryThumbFileURL = try saveToTmpFolder(t) - } + let temporaryImageFileURL = try saveToTmpFolder(imageData) + var temporaryThumbnailFileURL = try saveToTmpFolder(thumbnailData) + onSuccess([ "size": imageData.count, - "uri": temporaryFileURL.description, - "name": temporaryFileURL.lastPathComponent, - "thumb": temporaryThumbFileURL?.description ?? "" + "uri": temporaryImageFileURL?.description, + "name": temporaryImageFileURL?.lastPathComponent, + "thumb": temporaryThumbnailFileURL?.description ?? "" ]) } catch { let errorMessage = "Error occurred while writing image data to a temporary file: \(error)" @@ -285,7 +283,9 @@ class CameraView: UIView { } } - private func saveToTmpFolder(_ data: Data) throws -> URL { + private func saveToTmpFolder(_ data: Data?) throws -> URL? 
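Patch 10's change to AVCaptureDevice+Setter above is the same in all three setters: `unlockForConfiguration()` moves into a `defer` immediately after the lock succeeds, so every exit path, including early returns, releases the lock. If more setters accumulate, the idiom could be factored out once; a possible helper, not something the patch itself adds:

```swift
import AVFoundation

extension AVCaptureDevice {
    // Run a configuration change between lockForConfiguration() and a
    // deferred unlockForConfiguration(), so no early return or later
    // refactor can leak the lock.
    func withConfigurationLock(_ body: (AVCaptureDevice) -> Void) {
        do {
            try lockForConfiguration()
            defer { unlockForConfiguration() }
            body(self)
        } catch {
            print("Error locking configuration: \(error)")
        }
    }
}

// e.g. the torch setter above becomes:
// device.withConfigurationLock { $0.torchMode = newTorchMode }
```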
{ + guard let data else { return nil } + let temporaryFileName = ProcessInfo.processInfo.globallyUniqueString let temporaryFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent(temporaryFileName).appending(".jpg") let temporaryFileURL = URL(fileURLWithPath: temporaryFilePath) diff --git a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift index 346fb0c5fe..42b4a6eeab 100644 --- a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift +++ b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift @@ -12,12 +12,12 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate { private(set) var requestedPhotoSettings: AVCapturePhotoSettings private let onWillCapture: () -> Void - private let onCaptureSuccess: (_ uniqueID: Int64, _ imageData: Data, _ photo: AVCapturePhoto) -> Void + private let onCaptureSuccess: (_ uniqueID: Int64, _ imageData: Data, _ thumbnailData: Data?) -> Void private let onCaptureError: (_ uniqueID: Int64, _ message: String) -> Void init(with requestedPhotoSettings: AVCapturePhotoSettings, onWillCapture: @escaping () -> Void, - onCaptureSuccess: @escaping (_ uniqueID: Int64, _ imageData: Data, _ photo: AVCapturePhoto) -> Void, + onCaptureSuccess: @escaping (_ uniqueID: Int64, _ imageData: Data, _ thumbnailData: Data?) -> Void, onCaptureError: @escaping (_ uniqueID: Int64, _ errorMessage: String) -> Void) { self.requestedPhotoSettings = requestedPhotoSettings self.onWillCapture = onWillCapture @@ -43,6 +43,13 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate { return } - onCaptureSuccess(requestedPhotoSettings.uniqueID, imageData, photo) + var thumbnailData: Data? = nil + if let previewPixelBuffer = photo.previewPixelBuffer { + let ciImage = CIImage(cvPixelBuffer: previewPixelBuffer) + let uiImage = UIImage(ciImage: ciImage) + thumbnailData = uiImage.jpegData(compressionQuality: 0.7) + } + + onCaptureSuccess(requestedPhotoSettings.uniqueID, imageData, thumbnailData) } } diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 17569ca705..7a730e193a 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -27,7 +27,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private let photoOutput = AVCapturePhotoOutput() private let metadataOutput = AVCaptureMetadataOutput() - private var cameraType: CameraType = .back private var flashMode: FlashMode = .auto private var torchMode: TorchMode = .off private var resetFocus: (() -> Void)? @@ -123,8 +122,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { - self.cameraType = cameraType - DispatchQueue.main.async { self.cameraPreview.session = self.session self.cameraPreview.previewLayer.videoGravity = .resizeAspect @@ -146,7 +143,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue // so that the main queue isn't blocked, which keeps the UI responsive. 
         sessionQueue.async {
-            self.setupResult = self.setupCaptureSession(supportedBarcodeType: supportedBarcodeType)
+            self.setupResult = self.setupCaptureSession(cameraType: cameraType, supportedBarcodeType: supportedBarcodeType)
 
             self.addObservers()
@@ -163,17 +160,9 @@
         guard !pinchScale.isNaN else { return }
 
         sessionQueue.async {
-            guard let device = self.videoDeviceInput?.device else { return }
-            do {
-                try device.lockForConfiguration()
-                defer {device.unlockForConfiguration()}
-
-                let pinchVelocityDividerFactor = CGFloat(10.0);
-                let desiredZoomFactor = device.videoZoomFactor + CGFloat(atan2f(Float(pinchVelocity), Float(pinchVelocityDividerFactor)));
-                device.videoZoomFactor = max(1.0, min(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor));
-            } catch {
-                NSLog("device.lockForConfiguration error: \(error))")
-            }
+            let pinchVelocityDividerFactor: Float = 10.0
+            let incrementZoomFactor = CGFloat(atan2f(Float(pinchVelocity), pinchVelocityDividerFactor))
+            self.videoDeviceInput?.device.incrementZoomFactor(incrementZoomFactor)
         }
     }
@@ -217,8 +206,6 @@
     }
 
     func update(cameraType: CameraType) {
-        self.cameraType = cameraType
-
         sessionQueue.async {
             if self.videoDeviceInput?.device.position == cameraType.avPosition {
                 return
@@ -227,7 +214,7 @@
             // Avoid chaining device inputs when camera input is denied by the user, since both front and rear video input devices will be nil
             guard self.setupResult == .success,
                   let currentViewDeviceInput = self.videoDeviceInput,
-                  let videoDevice = self.getBestDevice(),
+                  let videoDevice = self.getBestDevice(for: cameraType),
                   let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
                 return
             }
@@ -253,23 +240,7 @@
             self.update(torchMode: self.torchMode)
         }
     }
-
-    func counterRotatedCaptureVideoOrientationFrom(deviceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation? {
-        switch(deviceOrientation) {
-        case .portrait:
-            return .portrait
-        case .portraitUpsideDown:
-            return .portraitUpsideDown
-        case .landscapeLeft:
-            return .landscapeLeft
-        case .landscapeRight:
-            return .landscapeRight
-        case .unknown: break
-        @unknown default: break
-        }
-        return nil
-    }
-
+
     func capturePicture(onWillCapture: @escaping () -> Void,
                         onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> Void,
                         onError: @escaping (_ message: String) -> Void) {
@@ -296,16 +267,9 @@
         let photoCaptureDelegate = PhotoCaptureDelegate(
             with: settings,
             onWillCapture: onWillCapture,
-            onCaptureSuccess: { uniqueID, imageData, photo in
+            onCaptureSuccess: { uniqueID, imageData, thumbnailData in
                 self.inProgressPhotoCaptureDelegates[uniqueID] = nil
 
-                var thumbnailData: Data? = nil
-                if let previewPixelBuffer = photo.previewPixelBuffer {
-                    let ciImage = CIImage(cvPixelBuffer: previewPixelBuffer)
-                    let uiImage = UIImage(ciImage: ciImage)
-                    thumbnailData = uiImage.jpegData(compressionQuality: 0.7)
-                }
-
                 onSuccess(imageData, thumbnailData)
             },
             onCaptureError: { uniqueID, errorMessage in
@@ -382,7 +346,23 @@
     }
 
     // MARK: - Private
-
+
+    private func counterRotatedCaptureVideoOrientationFrom(deviceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation? {
+        switch(deviceOrientation) {
+        case .portrait:
+            return .portrait
+        case .portraitUpsideDown:
+            return .portraitUpsideDown
+        case .landscapeLeft:
+            return .landscapeLeft
+        case .landscapeRight:
+            return .landscapeRight
+        case .unknown: break
+        @unknown default: break
+        }
+        return nil
+    }
+
     private func uiOrientationChanged(notification: Notification) {
         guard let device = notification.object as? UIDevice else {
             return
@@ -410,8 +390,7 @@
         }
     }
 
-    private func getBestDevice() -> AVCaptureDevice? {
-        // AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition),
+    private func getBestDevice(for cameraType: CameraType) -> AVCaptureDevice? {
         if #available(iOS 13.0, *) {
             if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: cameraType.avPosition) {
                 return device
@@ -426,8 +405,9 @@
         return nil
     }
 
-    private func setupCaptureSession(supportedBarcodeType: [AVMetadataObject.ObjectType]) -> SetupResult {
-        guard let videoDevice = self.getBestDevice(),
+    private func setupCaptureSession(cameraType: CameraType,
+                                     supportedBarcodeType: [AVMetadataObject.ObjectType]) -> SetupResult {
+        guard let videoDevice = self.getBestDevice(for: cameraType),
               let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
             return .sessionConfigurationFailed
         }
@@ -457,8 +437,9 @@
             let filteredTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) }
             metadataOutput.metadataObjectTypes = filteredTypes
         }
-
-        // Find the 'normal' zoom factor, which on the native camera defaults to the wide angle
+
+        // Devices that have multiple physical cameras are bound behind one virtual camera input. The zoom factor defines which physical camera it actually uses
+        // Find the 'normal' zoom factor, which on the physical camera defaults to the wide angle
         var wideAngleZoomFactor = 1.0
         if #available(iOS 13.0, *) {
             if let indexOfWideAngle = videoDevice.constituentDevices.firstIndex(where: { device in device.deviceType == .builtInWideAngleCamera }) {
diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift
index 7012782d0c..28fddc65b6 100644
--- a/ios/ReactNativeCameraKit/SimulatorCamera.swift
+++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift
@@ -56,7 +56,7 @@ class SimulatorCamera: CameraProtocol {
 
     func update(pinchVelocity: CGFloat, pinchScale: CGFloat) {
         DispatchQueue.main.async {
-            self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(pinchVelocity)"
+            self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(pinchVelocity), Scale: \(pinchScale)"
         }
     }
diff --git a/src/Camera.d.ts b/src/Camera.d.ts
index 0a3d3a81e4..8464b4002b 100644
--- a/src/Camera.d.ts
+++ b/src/Camera.d.ts
@@ -34,6 +34,7 @@ export interface CameraProps {
   ratioOverlayColor?: number | string;
   resetFocusTimeout?: number;
   resetFocusWhenMotionDetected?: boolean;
+  scanThrottleDelay?: number;
 }
 
 declare const Camera: React.FC<CameraProps>;
diff --git a/src/Camera.ios.tsx b/src/Camera.ios.tsx
index 614781e75b..c821db51f2 100644
--- a/src/Camera.ios.tsx
+++ b/src/Camera.ios.tsx
@@ -13,9 +13,6 @@ const Camera = React.forwardRef((props: CameraProps, ref: any) => {
     capture: async () => {
       return await CKCameraManager.capture({});
     },
-    setTorchMode: (mode = 'off') => {
-      CKCameraManager.setTorchMode(mode);
-    },
     requestDeviceCameraAuthorization: async () => {
      return await CKCameraManager.checkDeviceCameraAuthorizationStatus();
    },
diff --git a/src/types.ts b/src/types.ts
index 24d6c8dad7..15d70b3792 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -25,7 +25,6 @@ export type CaptureData = {
 
 export type CameraApi = {
   capture: () => Promise<CaptureData>;
-  setTorchMode: (mode: TorchMode) => void;
   requestDeviceCameraAuthorization: () => Promise<boolean>;
   checkDeviceCameraAuthorizationStatus: () => Promise<boolean>;
 };

From d92b40b20291044c2cb3d93a7570b7e28a030ec2 Mon Sep 17 00:00:00 2001
From: David Bertet
Date: Tue, 4 Jul 2023 01:40:17 -0700
Subject: [PATCH 11/20] Make button targets bigger by adding some padding

---
 example/src/BarcodeScreenExample.tsx | 19 +++++++++++--------
 example/src/CameraExample.tsx        | 19 +++++++++++--------
 2 files changed, 22 insertions(+), 16 deletions(-)

diff --git a/example/src/BarcodeScreenExample.tsx b/example/src/BarcodeScreenExample.tsx
index ed0f928864..678830c24d 100644
--- a/example/src/BarcodeScreenExample.tsx
+++ b/example/src/BarcodeScreenExample.tsx
@@ -192,23 +192,25 @@ const styles = StyleSheet.create({
   topButtons: {
     flexDirection: 'row',
     justifyContent: 'center',
-    paddingVertical: 10,
-    // borderColor: 'yellow',
     // position: 'relative',
   },
   flashMode: {
     position: 'absolute',
-    left: 20,
-    top: 10,
+    left: 10,
+    top: 0,
     bottom: 0,
+    padding: 10,
+  },
+  switchCamera: {
+    padding: 10,
   },
-  switchCamera: {},
   torch: {
     position: 'absolute',
-    right: 20,
-    top: 10,
+    right: 10,
+    top: 0,
     bottom: 0,
+    padding: 10,
   },
   cameraContainer: {
     ...Platform.select({
@@ -236,11 +238,12 @@
   },
   backBtn: {
     position: 'absolute',
-    left: 20,
+    left: 10,
     top: 0,
     bottom: 0,
     justifyContent: 'center',
     zIndex: 10,
+    padding: 10,
   },
   captureButtonContainer: {
     justifyContent: 'center',
diff --git a/example/src/CameraExample.tsx b/example/src/CameraExample.tsx
index 631806ebd5..2a2724ba9c 100644
--- a/example/src/CameraExample.tsx
+++ b/example/src/CameraExample.tsx
@@ -194,23 +194,25 @@ const styles = StyleSheet.create({
   topButtons: {
     flexDirection: 'row',
     justifyContent: 'center',
-    paddingVertical: 10,
-    // borderColor: 'yellow',
     // position: 'relative',
   },
   flashMode: {
     position: 'absolute',
-    left: 20,
-    top: 10,
+    left: 10,
+    top: 0,
     bottom: 0,
+    padding: 10,
+  },
+  switchCamera: {
+    padding: 10,
   },
-  switchCamera: {},
   torch: {
     position: 'absolute',
-    right: 20,
-    top: 10,
+    right: 10,
+    top: 0,
     bottom: 0,
+    padding: 10,
   },
   cameraContainer: {
     ...Platform.select({
@@ -239,11 +241,12 @@
   },
   backBtn: {
     position: 'absolute',
-    left: 20,
+    left: 10,
     top: 0,
     bottom: 0,
     justifyContent: 'center',
     zIndex: 10,
+    padding: 10,
   },
   captureButtonContainer: {
     justifyContent: 'center',

From 6c7e0fe500f6ff5cfefd03d0f1bb201534cc0ae4 Mon Sep 17 00:00:00 2001
From: David Bertet
Date: Tue, 4 Jul 2023 01:57:54 -0700
Subject: [PATCH 12/20] Fix default zoomFactor when we switch camera

---
 ios/ReactNativeCameraKit/RealCamera.swift | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift
index 7a730e193a..2dc318c5f1 100644
--- a/ios/ReactNativeCameraKit/RealCamera.swift
+++ b/ios/ReactNativeCameraKit/RealCamera.swift
@@ -131,7 +131,7 @@
             } else {
                 interfaceOrientation = UIApplication.shared.statusBarOrientation
             }
-            var orientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: interfaceOrientation)
+            let orientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: interfaceOrientation)
             self.cameraPreview.previewLayer.connection?.videoOrientation = orientation!
         }
@@ -227,6 +227,7 @@
 
             if self.session.canAddInput(videoDeviceInput) {
                 self.session.addInput(videoDeviceInput)
+                videoDevice.videoZoomFactor = self.wideAngleZoomFactor(for: videoDevice)
                 self.videoDeviceInput = videoDeviceInput
             } else {
                 // If it fails, put back current camera
@@ -250,7 +251,7 @@
          the main thread and session configuration is done on the session queue.
          */
         DispatchQueue.main.async {
-            var videoPreviewLayerOrientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation
+            let videoPreviewLayerOrientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation
 
             self.sessionQueue.async {
                 if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation {
@@ -418,11 +419,13 @@
 
         if session.canAddInput(videoDeviceInput) {
             session.addInput(videoDeviceInput)
+            videoDevice.videoZoomFactor = wideAngleZoomFactor(for: videoDevice)
             self.videoDeviceInput = videoDeviceInput
         } else {
             return .sessionConfigurationFailed
         }
 
+
         if session.canAddOutput(photoOutput) {
             session.addOutput(photoOutput)
         } else {
@@ -438,21 +441,23 @@
             metadataOutput.metadataObjectTypes = filteredTypes
         }
 
+        session.commitConfiguration()
+
+        return .success
+    }
+
+    private func wideAngleZoomFactor(for videoDevice: AVCaptureDevice) -> CGFloat {
         // Devices that have multiple physical cameras are bound behind one virtual camera input. The zoom factor defines which physical camera it actually uses
         // Find the 'normal' zoom factor, which on the physical camera defaults to the wide angle
-        var wideAngleZoomFactor = 1.0
         if #available(iOS 13.0, *) {
-            if let indexOfWideAngle = videoDevice.constituentDevices.firstIndex(where: { device in device.deviceType == .builtInWideAngleCamera }) {
+            if let indexOfWideAngle = videoDevice.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }) {
                 // .virtualDeviceSwitchOverVideoZoomFactors has the .constituentDevices zoom factor which borders the NEXT device
                 // so we grab the one PRIOR to the wide angle to get the wide angle's zoom factor
-                wideAngleZoomFactor = videoDevice.virtualDeviceSwitchOverVideoZoomFactors[indexOfWideAngle - 1].doubleValue
+                return videoDevice.virtualDeviceSwitchOverVideoZoomFactors[indexOfWideAngle - 1].doubleValue
             }
         }
 
-        self.videoDeviceInput?.device.videoZoomFactor = wideAngleZoomFactor
-
-        session.commitConfiguration()
-
-        return .success
+        return 1.0
     }
 
     // MARK: Private observers

From d738b6c8d220936c2f3f62ca23cd141e4c82f1d1 Mon Sep 17 00:00:00 2001
From: David Bertet
Date: Tue, 4 Jul 2023 12:43:32 -0700
Subject: [PATCH 13/20] Use hooks/functional component for App

---
 example/src/App.tsx | 61 +++++++++++++++++++--------------------
 1 file changed, 26 insertions(+), 35 deletions(-)

diff --git a/example/src/App.tsx b/example/src/App.tsx
index a5fad5fceb..cba2fff145 100644
--- a/example/src/App.tsx
+++ b/example/src/App.tsx
@@ -1,46 +1,37 @@
-import React, { Component } from 'react';
+import React, { useState } from 'react';
 import { StyleSheet, Text, View, TouchableOpacity, ScrollView } from 'react-native';
 import BarcodeScreenExample from './BarcodeScreenExample';
 import CameraExample from './CameraExample';
 
-type State = {
-  example?: any;
-};
-
-export default class App extends Component {
-  state: State;
+const App = () => {
+  const [example, setExample] = useState();
 
-  constructor(props) {
-    super(props);
-    this.state = {
-      example: undefined,
-    };
+  if (example) {
+    return example;
   }
 
-  render() {
-    if (this.state.example) {
-      const Example = this.state.example;
-      return this.setState({ example: undefined })} />;
-    }
-    return (
-      
-        
-          🎈
-          React Native Camera Kit
-        
-        
-          this.setState({ example: CameraExample })}>
-            Camera
-          
-          this.setState({ example: BarcodeScreenExample })}>
-            Barcode Scanner
-          
-        
-      
-    );
-  }
-}
+  const onBack = () => setExample(undefined);
+
+  return (
+    
+      
+        🎈
+        React Native Camera Kit
+      
+      
+        setExample()}>
+          Camera
+        
+        setExample()}>
+          Barcode Scanner
+        
+      
+    
+  );
+};
+
+export default App;
 
 const styles = StyleSheet.create({
   container: {

From 13aed7196f3ae263c2aaa7fbd6d20849f87e2a4e Mon Sep 17 00:00:00 2001
From: David Bertet
Date: Tue, 4 Jul 2023 15:08:51 -0700
Subject: [PATCH 14/20] Refine orientation

Make initializeMotionManager private
Use extensions for conversions
Handle accelerometer updates on a background thread
Use UIDeviceOrientation for deviceOrientation
When converting UIDeviceOrientation to an interface/video orientation, counter-rotate it
---
 .../project.pbxproj                      |  12 ++
 ios/ReactNativeCameraKit/RealCamera.swift | 131 +++++++-----------
 ios/ReactNativeCameraKit/Types.swift      |   2 +-
 .../UIDeviceOrientation+Convert.swift     |  28 ++++
 .../UIInterfaceOrientation+Convert.swift  |  26 ++++
 5 files changed, 119 insertions(+), 80 deletions(-)
 create mode 100644 ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift
 create mode 100644 ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift

diff --git a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
index c070d40b28..e39f71c9d1 100644
--- a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
+++ b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj
@@ -20,6 +20,9 @@
 		46C558CB2A4AAB3400C68BA0 /* CameraProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */; };
 		46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */; };
 		46C558CF2A4AAD7300C68BA0 /* FocusInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */; };
+		46EE6F472A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46EE6F462A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift */; };
+		46EE6F492A54C12D001B9C30 /* AVCaptureDevice+Setter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46EE6F482A54C12D001B9C30 /* AVCaptureDevice+Setter.swift */; };
+		46EE6F4B2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46EE6F4A2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift */; };
 		46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */; };
 		46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */; };
 /* End PBXBuildFile section */
@@ -53,6 +56,9 @@
 		46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraProtocol.swift; sourceTree = "<group>"; };
 		46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorCamera.swift; sourceTree = "<group>"; };
 		46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FocusInterfaceView.swift; sourceTree = "<group>"; };
+		46EE6F462A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+Convert.swift"; sourceTree = "<group>"; };
+		46EE6F482A54C12D001B9C30 /* AVCaptureDevice+Setter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+Setter.swift"; sourceTree = "<group>"; };
+ 46EE6F4A2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIDeviceOrientation+Convert.swift"; sourceTree = ""; }; 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerFrameView.swift; sourceTree = ""; }; 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerInterfaceView.swift; sourceTree = ""; }; /* End PBXFileReference section */ @@ -87,6 +93,9 @@ 264693501CFB2A6B00F3A740 /* ReactNativeCameraKit */ = { isa = PBXGroup; children = ( + 46EE6F482A54C12D001B9C30 /* AVCaptureDevice+Setter.swift */, + 46EE6F4A2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift */, + 46EE6F462A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift */, 26550AF51CFC7086007FF2DF /* CKCameraManager.m */, 4620AA712A2C4FA500BC8929 /* CameraManager.swift */, 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */, @@ -172,9 +181,12 @@ 4620AA702A2C4A5F00BC8929 /* SimulatorPreviewView.swift in Sources */, 46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */, 46C558C92A4AAAD100C68BA0 /* RealCamera.swift in Sources */, + 46EE6F4B2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift in Sources */, + 46EE6F472A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift in Sources */, 46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */, 46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */, 46506F272A37810C0058D3F2 /* RealPreviewView.swift in Sources */, + 46EE6F492A54C12D001B9C30 /* AVCaptureDevice+Setter.swift in Sources */, 4630968B2A2D5423002ABA1A /* Types.swift in Sources */, 4620AA6C2A2C03FC00BC8929 /* RatioOverlayView.swift in Sources */, 460C0C6C2A4B52D800066334 /* PhotoCaptureDelegate.swift in Sources */, diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 2dc318c5f1..4c019491f2 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -35,7 +35,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var scannerFrameSize: CGRect? = nil private var onOrientationChange: RCTDirectEventBlock? - private var deviceOrientation = UIInterfaceOrientation.unknown + private var deviceOrientation = UIDeviceOrientation.unknown private var motionManager: CMMotionManager? // KVO observation @@ -84,43 +84,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } // MARK: - Public - - func initializeMotionManager() { - motionManager = CMMotionManager() - motionManager?.accelerometerUpdateInterval = 0.2 - motionManager?.gyroUpdateInterval = 0.2 - motionManager?.startAccelerometerUpdates(to: (OperationQueue.current)!, withHandler: { - (accelerometerData, error) -> Void in - if error != nil { - print("\(error!)") - } - guard let acceleration = accelerometerData?.acceleration else { - print("no acceleration data") - return - } - var orientationNew: UIInterfaceOrientation - if acceleration.x >= 0.75 { - orientationNew = .landscapeLeft - } else if acceleration.x <= -0.75 { - orientationNew = .landscapeRight - } else if acceleration.y <= -0.75 { - orientationNew = .portrait - } else if acceleration.y >= 0.75 { - orientationNew = .portraitUpsideDown - } else { - // Device is not clearly pointing in either direction - // (e.g. 
it's flat on the table, so stick with the same orientation) - return - } - - if orientationNew == self.deviceOrientation { - return - } - self.deviceOrientation = orientationNew - self.onOrientationChange?(["orientation": Orientation.init(from: orientationNew)!.rawValue]) - }) - } - + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { DispatchQueue.main.async { self.cameraPreview.session = self.session @@ -131,8 +95,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } else { interfaceOrientation = UIApplication.shared.statusBarOrientation } - let orientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: interfaceOrientation) - self.cameraPreview.previewLayer.connection?.videoOrientation = orientation! + self.cameraPreview.previewLayer.connection?.videoOrientation = interfaceOrientation.videoOrientation } self.initializeMotionManager() @@ -251,7 +214,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega the main thread and session configuration is done on the session queue. */ DispatchQueue.main.async { - let videoPreviewLayerOrientation = self.counterRotatedCaptureVideoOrientationFrom(deviceOrientation: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation + let videoPreviewLayerOrientation = self.deviceOrientation.videoOrientation ?? self.cameraPreview.previewLayer.connection?.videoOrientation self.sessionQueue.async { if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { @@ -348,47 +311,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // MARK: - Private - private func counterRotatedCaptureVideoOrientationFrom(deviceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation? { - switch(deviceOrientation) { - case .portrait: - return .portrait - case .portraitUpsideDown: - return .portraitUpsideDown - case .landscapeLeft: - return .landscapeLeft - case .landscapeRight: - return .landscapeRight - case .unknown: break - @unknown default: break - } - return nil - } - private func uiOrientationChanged(notification: Notification) { - guard let device = notification.object as? UIDevice else { + guard let device = notification.object as? UIDevice, + let videoOrientation = device.orientation.videoOrientation else { return } - - // Counter-rotate video when in landscapeLeft/Right UI so it appears level - // (note how landscapeLeft sets landscapeRight) - switch(device.orientation) { - case .unknown: break - case .portrait: - self.cameraPreview.previewLayer.connection?.videoOrientation = .portrait - print("ui portrait") - case .portraitUpsideDown: - self.cameraPreview.previewLayer.connection?.videoOrientation = .portraitUpsideDown - print("ui upside down") - case .landscapeLeft: - self.cameraPreview.previewLayer.connection?.videoOrientation = .landscapeRight - print("ui landscapeLeft") - case .landscapeRight: - self.cameraPreview.previewLayer.connection?.videoOrientation = .landscapeLeft - print("ui landscapeRight") - case .faceUp: break - case .faceDown: break - @unknown default: break - } + + self.cameraPreview.previewLayer.connection?.videoOrientation = videoOrientation } private func getBestDevice(for cameraType: CameraType) -> AVCaptureDevice? 
{ @@ -460,6 +389,50 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega return 1.0 } + // MARK: - Private device orientation from accelerometer + + private func initializeMotionManager() { + motionManager = CMMotionManager() + motionManager?.accelerometerUpdateInterval = 0.2 + motionManager?.gyroUpdateInterval = 0.2 + motionManager?.startAccelerometerUpdates(to: OperationQueue(), withHandler: { [weak self] (accelerometerData, error) -> Void in + guard error == nil else { + print("\(error!)") + return + } + guard let accelerometerData else { + print("no acceleration data") + return + } + + guard let newOrientation = self?.deviceOrientation(from: accelerometerData.acceleration), + newOrientation != self?.deviceOrientation else { + return + } + + self?.deviceOrientation = newOrientation + self?.onOrientationChange?(["orientation": Orientation.init(from: newOrientation)!.rawValue]) + }) + } + + private func deviceOrientation(from acceleration: CMAcceleration) -> UIDeviceOrientation? { + let threshold = 0.75 + if acceleration.x >= threshold { + return .landscapeRight + } else if acceleration.x <= -threshold { + return .landscapeLeft + } else if acceleration.y <= -threshold { + return .portrait + } else if acceleration.y >= threshold { + return .portraitUpsideDown + } else { + // Device is not clearly pointing in either direction + // (e.g. it's flat on the table, so stick with the same orientation) + return nil + } + } + + // MARK: Private observers private func addObservers() { diff --git a/ios/ReactNativeCameraKit/Types.swift b/ios/ReactNativeCameraKit/Types.swift index 68f3f658d5..331ebbcb87 100644 --- a/ios/ReactNativeCameraKit/Types.swift +++ b/ios/ReactNativeCameraKit/Types.swift @@ -139,7 +139,7 @@ enum Orientation: Int { } } - var avVideoOrientation: AVCaptureVideoOrientation { + var videoOrientation: AVCaptureVideoOrientation { switch self { case .portrait: return .portrait case .landscapeLeft: return .landscapeLeft diff --git a/ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift b/ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift new file mode 100644 index 0000000000..263afde5c0 --- /dev/null +++ b/ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift @@ -0,0 +1,28 @@ +// +// UIDeviceOrientation+Convert.swift +// ReactNativeCameraKit +// + +import UIKit +import AVFoundation + +// Device orientation counter-rotate interface when in landscapeLeft/Right so it appears level +// (note how landscapeLeft sets landscapeRight) +extension UIDeviceOrientation { + var videoOrientation: AVCaptureVideoOrientation? 
{ + get { + switch self { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + case .faceUp, .faceDown, .unknown: return nil + @unknown default: return nil + } + } + } +} diff --git a/ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift b/ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift new file mode 100644 index 0000000000..04b26ca102 --- /dev/null +++ b/ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift @@ -0,0 +1,26 @@ +// +// UIInterfaceOrientation+Convert.swift +// ReactNativeCameraKit +// + +import UIKit +import AVFoundation + +extension UIInterfaceOrientation { + var videoOrientation: AVCaptureVideoOrientation { + get { + switch self { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + case .unknown: return .portrait + @unknown default: return .portrait + } + } + } +} From 231bb301b3f7fe92619bd6ba4c96fd20efb85d39 Mon Sep 17 00:00:00 2001 From: David Bertet Date: Wed, 5 Jul 2023 01:11:08 -0700 Subject: [PATCH 15/20] Try to match native app pinch to zoom --- .../AVCaptureDevice+Setter.swift | 17 ++++++++++++++++- ios/ReactNativeCameraKit/CameraView.swift | 1 + ios/ReactNativeCameraKit/RealCamera.swift | 4 +--- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift index 2e8641191d..5559887713 100644 --- a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift +++ b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift @@ -27,12 +27,27 @@ extension AVCaptureDevice { defer { unlockForConfiguration() } let desiredZoomFactor = videoZoomFactor + zoomFactorIncrement - videoZoomFactor = max(1.0, min(desiredZoomFactor, activeFormat.videoMaxZoomFactor)) + let maxZoomFactor = min(20, maxAvailableVideoZoomFactor) + videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor)) } catch { print("Error setting zoom factor: \(error)") } } + func scaleZoomFactor(_ scale: CGFloat) { + do { + try lockForConfiguration() + + defer { unlockForConfiguration() } + + let desiredZoomFactor = videoZoomFactor * scale + let maxZoomFactor = min(20, maxAvailableVideoZoomFactor) + videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor)) + } catch { + print("Error setting zoom factor: \(error)") + } + } + func focusWithMode(_ focusMode: AVCaptureDevice.FocusMode, exposeWithMode exposureMode: AVCaptureDevice.ExposureMode, atDevicePoint point: CGPoint, diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index f10cae080c..8047c752aa 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -312,6 +312,7 @@ class CameraView: UIView { @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { if pinchRecognizer.state == .changed { camera.update(pinchVelocity: pinchRecognizer.velocity, pinchScale: pinchRecognizer.scale) + // Reset scale after every reading to get a one timeframe scale value. 
Otherwise pinchRecognizer.scale is relative to the start of the gesture pinchRecognizer.scale = 1.0 } } diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 4c019491f2..c4ab377762 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -123,9 +123,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega guard !pinchScale.isNaN else { return } sessionQueue.async { - let pinchVelocityDividerFactor: Float = 10.0 - let incrementZoomFactor = CGFloat(atan2f(Float(pinchVelocity), pinchVelocityDividerFactor)) - self.videoDeviceInput?.device.incrementZoomFactor(incrementZoomFactor) + self.videoDeviceInput?.device.scaleZoomFactor(pinchScale) } } From 2f0195bfdc7895362f98d9c1b6671c8ff1985238 Mon Sep 17 00:00:00 2001 From: David Bertet Date: Wed, 5 Jul 2023 01:22:45 -0700 Subject: [PATCH 16/20] Enable videoStabilization if supported --- ios/ReactNativeCameraKit/RealCamera.swift | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index c4ab377762..b8f82997c5 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -355,6 +355,12 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega if session.canAddOutput(photoOutput) { session.addOutput(photoOutput) + + if let photoOutputConnection = self.photoOutput.connection(with: .video) { + if photoOutputConnection.isVideoStabilizationSupported { + photoOutputConnection.preferredVideoStabilizationMode = .auto + } + } } else { return .sessionConfigurationFailed } From fedec20b2648ae6399941c5d0d8512aef7cae307 Mon Sep 17 00:00:00 2001 From: David Bertet Date: Thu, 6 Jul 2023 00:39:20 -0700 Subject: [PATCH 17/20] Remove extensions & unused pinch velocity --- .../project.pbxproj | 12 -- .../AVCaptureDevice+Setter.swift | 75 ------------- ios/ReactNativeCameraKit/CameraProtocol.swift | 2 +- ios/ReactNativeCameraKit/CameraView.swift | 2 +- ios/ReactNativeCameraKit/RealCamera.swift | 106 +++++++++++++++--- .../SimulatorCamera.swift | 4 +- .../UIDeviceOrientation+Convert.swift | 28 ----- .../UIInterfaceOrientation+Convert.swift | 26 ----- 8 files changed, 92 insertions(+), 163 deletions(-) delete mode 100644 ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift delete mode 100644 ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift delete mode 100644 ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift diff --git a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj index e39f71c9d1..c070d40b28 100644 --- a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj +++ b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj @@ -20,9 +20,6 @@ 46C558CB2A4AAB3400C68BA0 /* CameraProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */; }; 46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */; }; 46C558CF2A4AAD7300C68BA0 /* FocusInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */; }; - 46EE6F472A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46EE6F462A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift */; }; - 46EE6F492A54C12D001B9C30 /* 
AVCaptureDevice+Setter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46EE6F482A54C12D001B9C30 /* AVCaptureDevice+Setter.swift */; }; - 46EE6F4B2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46EE6F4A2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift */; }; 46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */; }; 46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */; }; /* End PBXBuildFile section */ @@ -56,9 +53,6 @@ 46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraProtocol.swift; sourceTree = ""; }; 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorCamera.swift; sourceTree = ""; }; 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FocusInterfaceView.swift; sourceTree = ""; }; - 46EE6F462A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+Convert.swift"; sourceTree = ""; }; - 46EE6F482A54C12D001B9C30 /* AVCaptureDevice+Setter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+Setter.swift"; sourceTree = ""; }; - 46EE6F4A2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIDeviceOrientation+Convert.swift"; sourceTree = ""; }; 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerFrameView.swift; sourceTree = ""; }; 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerInterfaceView.swift; sourceTree = ""; }; /* End PBXFileReference section */ @@ -93,9 +87,6 @@ 264693501CFB2A6B00F3A740 /* ReactNativeCameraKit */ = { isa = PBXGroup; children = ( - 46EE6F482A54C12D001B9C30 /* AVCaptureDevice+Setter.swift */, - 46EE6F4A2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift */, - 46EE6F462A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift */, 26550AF51CFC7086007FF2DF /* CKCameraManager.m */, 4620AA712A2C4FA500BC8929 /* CameraManager.swift */, 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */, @@ -181,12 +172,9 @@ 4620AA702A2C4A5F00BC8929 /* SimulatorPreviewView.swift in Sources */, 46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */, 46C558C92A4AAAD100C68BA0 /* RealCamera.swift in Sources */, - 46EE6F4B2A54CEB3001B9C30 /* UIDeviceOrientation+Convert.swift in Sources */, - 46EE6F472A54C107001B9C30 /* UIInterfaceOrientation+Convert.swift in Sources */, 46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */, 46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */, 46506F272A37810C0058D3F2 /* RealPreviewView.swift in Sources */, - 46EE6F492A54C12D001B9C30 /* AVCaptureDevice+Setter.swift in Sources */, 4630968B2A2D5423002ABA1A /* Types.swift in Sources */, 4620AA6C2A2C03FC00BC8929 /* RatioOverlayView.swift in Sources */, 460C0C6C2A4B52D800066334 /* PhotoCaptureDelegate.swift in Sources */, diff --git a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift 
b/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift deleted file mode 100644 index 5559887713..0000000000 --- a/ios/ReactNativeCameraKit/AVCaptureDevice+Setter.swift +++ /dev/null @@ -1,75 +0,0 @@ -// -// AVCaptureDevice+Setter.swift -// ReactNativeCameraKit -// - -import AVFoundation - -extension AVCaptureDevice { - func setTorchMode(_ newTorchMode: AVCaptureDevice.TorchMode) { - if isTorchModeSupported(newTorchMode) && hasTorch { - do { - try lockForConfiguration() - - defer { unlockForConfiguration() } - - torchMode = newTorchMode - } catch { - print("Error setting torch mode: \(error)") - } - } - } - - func incrementZoomFactor(_ zoomFactorIncrement: CGFloat) { - do { - try lockForConfiguration() - - defer { unlockForConfiguration() } - - let desiredZoomFactor = videoZoomFactor + zoomFactorIncrement - let maxZoomFactor = min(20, maxAvailableVideoZoomFactor) - videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor)) - } catch { - print("Error setting zoom factor: \(error)") - } - } - - func scaleZoomFactor(_ scale: CGFloat) { - do { - try lockForConfiguration() - - defer { unlockForConfiguration() } - - let desiredZoomFactor = videoZoomFactor * scale - let maxZoomFactor = min(20, maxAvailableVideoZoomFactor) - videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor)) - } catch { - print("Error setting zoom factor: \(error)") - } - } - - func focusWithMode(_ focusMode: AVCaptureDevice.FocusMode, - exposeWithMode exposureMode: AVCaptureDevice.ExposureMode, - atDevicePoint point: CGPoint, - isSubjectAreaChangeMonitoringEnabled: Bool) { - do { - try lockForConfiguration() - - defer { unlockForConfiguration() } - - if isFocusPointOfInterestSupported && isFocusModeSupported(focusMode) { - focusPointOfInterest = point - self.focusMode = focusMode - } - - if isExposurePointOfInterestSupported && isExposureModeSupported(exposureMode) { - exposurePointOfInterest = point - self.exposureMode = exposureMode - } - - self.isSubjectAreaChangeMonitoringEnabled = isSubjectAreaChangeMonitoringEnabled - } catch { - print("Error setting focus: \(error)") - } - } -} diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index 557712f523..b186964e36 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -11,7 +11,7 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) func cameraRemovedFromSuperview() - func update(pinchVelocity: CGFloat, pinchScale: CGFloat) + func update(pinchScale: CGFloat) func update(torchMode: TorchMode) func update(flashMode: FlashMode) func update(cameraType: CameraType) diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 8047c752aa..69e66454eb 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -311,7 +311,7 @@ class CameraView: UIView { @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { if pinchRecognizer.state == .changed { - camera.update(pinchVelocity: pinchRecognizer.velocity, pinchScale: pinchRecognizer.scale) + camera.update(pinchScale: pinchRecognizer.scale) // Reset scale after every reading to get a one timeframe scale value. 
Otherwise pinchRecognizer.scale is relative to the start of the gesture pinchRecognizer.scale = 1.0 } diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index b8f82997c5..e76a9c076a 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -95,7 +95,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } else { interfaceOrientation = UIApplication.shared.statusBarOrientation } - self.cameraPreview.previewLayer.connection?.videoOrientation = interfaceOrientation.videoOrientation + self.cameraPreview.previewLayer.connection?.videoOrientation = self.videoOrientation(from: interfaceOrientation) } self.initializeMotionManager() @@ -119,11 +119,23 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } - func update(pinchVelocity: CGFloat, pinchScale: CGFloat) { + func update(pinchScale: CGFloat) { guard !pinchScale.isNaN else { return } sessionQueue.async { - self.videoDeviceInput?.device.scaleZoomFactor(pinchScale) + guard let videoDevice = self.videoDeviceInput?.device else { return } + + do { + try videoDevice.lockForConfiguration() + + let desiredZoomFactor = videoDevice.videoZoomFactor * pinchScale + let maxZoomFactor = min(20, videoDevice.maxAvailableVideoZoomFactor) + videoDevice.videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor)) + + videoDevice.unlockForConfiguration() + } catch { + print("Error setting zoom factor: \(error)") + } } } @@ -132,6 +144,8 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega let devicePoint = self.cameraPreview.previewLayer.captureDevicePointConverted(fromLayerPoint: touchPoint) self.sessionQueue.async { + guard let videoDevice = self.videoDeviceInput?.device else { return } + if case let .customFocus(_, resetFocus, focusFinished) = focusBehavior { self.resetFocus = resetFocus self.focusFinished = focusFinished @@ -140,10 +154,25 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.focusFinished = nil } - self.videoDeviceInput?.device.focusWithMode(focusBehavior.avFocusMode, - exposeWithMode: focusBehavior.exposureMode, - atDevicePoint: devicePoint, - isSubjectAreaChangeMonitoringEnabled: focusBehavior.isSubjectAreaChangeMonitoringEnabled) + do { + try videoDevice.lockForConfiguration() + + if videoDevice.isFocusPointOfInterestSupported && videoDevice.isFocusModeSupported(focusBehavior.avFocusMode) { + videoDevice.focusPointOfInterest = devicePoint + videoDevice.focusMode = focusBehavior.avFocusMode + } + + if videoDevice.isExposurePointOfInterestSupported && videoDevice.isExposureModeSupported(focusBehavior.exposureMode) { + videoDevice.exposurePointOfInterest = devicePoint + videoDevice.exposureMode = focusBehavior.exposureMode + } + + videoDevice.isSubjectAreaChangeMonitoringEnabled = focusBehavior.isSubjectAreaChangeMonitoringEnabled + + videoDevice.unlockForConfiguration() + } catch { + print("Error setting focus: \(error)") + } } } } @@ -155,9 +184,19 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func update(torchMode: TorchMode) { self.torchMode = torchMode - sessionQueue.asyncAfter(deadline: .now() + 0.1) { - if (self.videoDeviceInput?.device.torchMode != torchMode.avTorchMode) { - self.videoDeviceInput?.device.setTorchMode(torchMode.avTorchMode) + sessionQueue.async { + guard let videoDevice = self.videoDeviceInput?.device, videoDevice.torchMode != torchMode.avTorchMode else 
{ return } + + if videoDevice.isTorchModeSupported(torchMode.avTorchMode) && videoDevice.hasTorch { + do { + try videoDevice.lockForConfiguration() + + videoDevice.torchMode = torchMode.avTorchMode + + videoDevice.unlockForConfiguration() + } catch { + print("Error setting torch mode: \(error)") + } } } } @@ -212,7 +251,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega the main thread and session configuration is done on the session queue. */ DispatchQueue.main.async { - let videoPreviewLayerOrientation = self.deviceOrientation.videoOrientation ?? self.cameraPreview.previewLayer.connection?.videoOrientation + let videoPreviewLayerOrientation = self.videoOrientation(from: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation self.sessionQueue.async { if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { @@ -309,15 +348,38 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // MARK: - Private - private func uiOrientationChanged(notification: Notification) { - guard let device = notification.object as? UIDevice, - let videoOrientation = device.orientation.videoOrientation else { - return + private func videoOrientation(from deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? { + // Device orientation counter-rotate interface when in landscapeLeft/Right so it appears level + // (note how landscapeLeft sets landscapeRight) + switch deviceOrientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + case .faceUp, .faceDown, .unknown: return nil + @unknown default: return nil } + } - self.cameraPreview.previewLayer.connection?.videoOrientation = videoOrientation + private func videoOrientation(from interfaceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation { + switch interfaceOrientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + case .unknown: return .portrait + @unknown default: return .portrait + } } - + private func getBestDevice(for cameraType: CameraType) -> AVCaptureDevice? { if #available(iOS 13.0, *) { if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: cameraType.avPosition) { @@ -436,7 +498,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } - // MARK: Private observers private func addObservers() { @@ -482,6 +543,15 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega resetFocus?() } + private func uiOrientationChanged(notification: Notification) { + guard let device = notification.object as? UIDevice, + let videoOrientation = videoOrientation(from: device.orientation) else { + return + } + + self.cameraPreview.previewLayer.connection?.videoOrientation = videoOrientation + } + private func sessionRuntimeError(notification: Notification) { guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? 
AVError else { return } diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index 28fddc65b6..0f118ccf45 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -54,9 +54,9 @@ class SimulatorCamera: CameraProtocol { self.onOrientationChange = onOrientationChange } - func update(pinchVelocity: CGFloat, pinchScale: CGFloat) { + func update(pinchScale: CGFloat) { DispatchQueue.main.async { - self.mockPreview.zoomVelocityLabel.text = "Zoom Velocity: \(pinchVelocity), Scale: \(pinchScale)" + self.mockPreview.zoomVelocityLabel.text = "Zoom Scale: \(pinchScale)" } } diff --git a/ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift b/ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift deleted file mode 100644 index 263afde5c0..0000000000 --- a/ios/ReactNativeCameraKit/UIDeviceOrientation+Convert.swift +++ /dev/null @@ -1,28 +0,0 @@ -// -// UIDeviceOrientation+Convert.swift -// ReactNativeCameraKit -// - -import UIKit -import AVFoundation - -// Device orientation counter-rotate interface when in landscapeLeft/Right so it appears level -// (note how landscapeLeft sets landscapeRight) -extension UIDeviceOrientation { - var videoOrientation: AVCaptureVideoOrientation? { - get { - switch self { - case .portrait: - return .portrait - case .portraitUpsideDown: - return .portraitUpsideDown - case .landscapeLeft: - return .landscapeRight - case .landscapeRight: - return .landscapeLeft - case .faceUp, .faceDown, .unknown: return nil - @unknown default: return nil - } - } - } -} diff --git a/ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift b/ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift deleted file mode 100644 index 04b26ca102..0000000000 --- a/ios/ReactNativeCameraKit/UIInterfaceOrientation+Convert.swift +++ /dev/null @@ -1,26 +0,0 @@ -// -// UIInterfaceOrientation+Convert.swift -// ReactNativeCameraKit -// - -import UIKit -import AVFoundation - -extension UIInterfaceOrientation { - var videoOrientation: AVCaptureVideoOrientation { - get { - switch self { - case .portrait: - return .portrait - case .portraitUpsideDown: - return .portraitUpsideDown - case .landscapeLeft: - return .landscapeLeft - case .landscapeRight: - return .landscapeRight - case .unknown: return .portrait - @unknown default: return .portrait - } - } - } -} From b0048a0bb66a26791bb8779526b85b2849bfdb26 Mon Sep 17 00:00:00 2001 From: David Bertet Date: Thu, 6 Jul 2023 00:43:39 -0700 Subject: [PATCH 18/20] Remove `setTorchMode` method on Android `torchMode` props should be used instead --- .../main/java/com/rncamerakit/RNCameraKitModule.kt | 11 ----------- src/Camera.android.tsx | 3 --- 2 files changed, 14 deletions(-) diff --git a/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt b/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt index bda8ddbd66..be8ad9a6a2 100644 --- a/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt +++ b/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt @@ -37,15 +37,4 @@ class RNCameraKitModule(private val reactContext: ReactApplicationContext) : Rea view.capture(options.toHashMap(), promise) } } - - @ReactMethod - fun setTorchMode( mode: String, viewTag: Int) { - val context = reactContext - val uiManager = context.getNativeModule(UIManagerModule::class.java) - context.runOnUiQueueThread { - val view = uiManager?.resolveView(viewTag) as CKCamera - view.setTorchMode(mode) - } - - } } \ No newline at end of file diff --git 
a/src/Camera.android.tsx b/src/Camera.android.tsx index a69e58bcf2..07bb2b984a 100644 --- a/src/Camera.android.tsx +++ b/src/Camera.android.tsx @@ -16,9 +16,6 @@ const Camera = React.forwardRef((props: CameraProps, ref) => { // we must use the general module and tell it what View it's supposed to be using return await RNCameraKitModule.capture(options, findNodeHandle(nativeRef.current ?? null)); }, - setTorchMode: (mode = 'off') => { - RNCameraKitModule.setTorchMode(mode, findNodeHandle(nativeRef.current ?? null)); - }, requestDeviceCameraAuthorization: () => { throw new Error('Not implemented'); }, From 5f1096230c0f688d0029f86e2a0b3a6ef2319b1d Mon Sep 17 00:00:00 2001 From: David Bertet Date: Sat, 8 Jul 2023 01:30:02 -0700 Subject: [PATCH 19/20] Rework UI to make it work on both platforms --- .../android/app/src/main/AndroidManifest.xml | 1 + example/src/BarcodeScreenExample.tsx | 170 ++++++--------- example/src/CameraExample.tsx | 206 +++++++----------- 3 files changed, 146 insertions(+), 231 deletions(-) diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml index 4122f36a59..560d305f9e 100644 --- a/example/android/app/src/main/AndroidManifest.xml +++ b/example/android/app/src/main/AndroidManifest.xml @@ -2,6 +2,7 @@ + void }) => { const cameraRatio = 4 / 3; return ( - - - - {flashData.image && ( - onSetFlash()}> - - - )} - onSwitchCameraPressed()}> - - - onSetTorch()}> - + + + {flashData.image && ( + + - + )} + + + + + + + + + void }) => { )} */} + - - { - onBack(); - }} - > + + Back - - onCaptureImagePressed()}> - - - - - - {barcode} - - + + + + + + + + + + + {barcode} + @@ -186,89 +185,44 @@ const BarcodeExample = ({ onBack }: { onBack: () => void }) => { export default BarcodeExample; const styles = StyleSheet.create({ - top: { - zIndex: 10, + screen: { + height: '100%', + backgroundColor: 'black', }, + topButtons: { + margin: 10, + zIndex: 10, flexDirection: 'row', - justifyContent: 'center', - // borderColor: 'yellow', - // position: 'relative', - }, - flashMode: { - position: 'absolute', - left: 10, - top: 0, - bottom: 0, - padding: 10, + justifyContent: 'space-between', }, - switchCamera: { - padding: 10, - }, - torch: { - position: 'absolute', - right: 10, - top: 0, - bottom: 0, + topButton: { padding: 10, }, + cameraContainer: { - ...Platform.select({ - android: { - position: 'absolute', - top: 0, - left: 0, - width, - height, - }, - default: { - justifyContent: 'center', - flex: 1, - }, - }), + justifyContent: 'center', + flex: 1, + }, + cameraPreview: { + aspectRatio: 3 / 4, + width: '100%', }, bottomButtons: { - bottom: 0, - left: 0, - right: 0, - }, - bottomButtonsInner: { - paddingVertical: 10, + margin: 10, + flexDirection: 'row', + alignItems: 'center', }, - backBtn: { - position: 'absolute', - left: 10, - top: 0, - bottom: 0, - justifyContent: 'center', - zIndex: 10, - padding: 10, + backBtnContainer: { + flex: 1, + alignItems: 'flex-start', }, captureButtonContainer: { + flex: 1, justifyContent: 'center', alignItems: 'center', - zIndex: 9, - }, - rightBottomArea: { - position: 'absolute', - right: 20, - top: 0, - bottom: 0, - zIndex: 10, - }, - textStyle: { - color: 'white', - fontSize: 20, }, - // ratioBestText: { - // color: 'white', - // fontSize: 18, - // }, - // ratioText: { - // color: '#ffc233', - // fontSize: 18, - // }, textNumberContainer: { position: 'absolute', top: 0, @@ -278,8 +232,14 @@ const styles = StyleSheet.create({ justifyContent: 'center', alignItems: 'center', }, - gap: { - flex: 10, - 
flexDirection: 'column', + barcodeContainer: { + flex: 1, + alignItems: 'flex-end', + justifyContent: 'center', + }, + textStyle: { + padding: 10, + color: 'white', + fontSize: 20, }, }); diff --git a/example/src/CameraExample.tsx b/example/src/CameraExample.tsx index 2a2724ba9c..dd7964fda9 100644 --- a/example/src/CameraExample.tsx +++ b/example/src/CameraExample.tsx @@ -1,15 +1,5 @@ import React, { useState, useRef } from 'react'; -import { - StyleSheet, - Text, - View, - TouchableOpacity, - Image, - Dimensions, - Platform, - SafeAreaView, - useWindowDimensions, -} from 'react-native'; +import { StyleSheet, Text, View, TouchableOpacity, Image, Dimensions, SafeAreaView } from 'react-native'; import Camera from '../../src/Camera'; import { CameraApi, CameraType, CaptureData } from '../../src/types'; import { Orientation } from '../../src'; @@ -100,40 +90,34 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { console.log('image', image); }; - const window = useWindowDimensions(); - const cameraRatio = 4 / 3; - return ( - - - - {flashData.image && ( - onSetFlash()}> - - - )} - onSwitchCameraPressed()}> - - - onSetTorch()}> - + + + {flashData.image && ( + + - - - - {showImageUri ? ( + )} + + + + + + + + + + + {showImageUri ? ( + ) : ( void }) => { /> )} + - - onBack()}> - Back + + + Back - - onCaptureImagePressed()}> - - - {numberOfImagesTaken()} - + + + + + + + {numberOfImagesTaken()} + + + + + + {captureImages.length > 0 && ( + { + if (showImageUri) { + setShowImageUri(''); + } else { + setShowImageUri(captureImages[captureImages.length - 1].uri); + } + }} + > + - - - {captureImages.length > 0 && ( - { - if (showImageUri) { - setShowImageUri(''); - } else { - setShowImageUri(captureImages[captureImages.length - 1].uri); - } - }} - > - - - )} - + )} @@ -188,83 +175,48 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { export default CameraExample; const styles = StyleSheet.create({ - top: { - zIndex: 10, + screen: { + height: '100%', + backgroundColor: 'black', }, + topButtons: { + margin: 10, + zIndex: 10, flexDirection: 'row', - justifyContent: 'center', - // borderColor: 'yellow', - // position: 'relative', - }, - flashMode: { - position: 'absolute', - left: 10, - top: 0, - bottom: 0, - padding: 10, + justifyContent: 'space-between', }, - switchCamera: { - padding: 10, - }, - torch: { - position: 'absolute', - right: 10, - top: 0, - bottom: 0, + topButton: { padding: 10, }, + cameraContainer: { - ...Platform.select({ - android: { - position: 'absolute', - top: 0, - left: 0, - width, - height, - }, - default: { - justifyContent: 'center', - flex: 1, - // zIndex: 0 - }, - }), + justifyContent: 'center', + flex: 1, + }, + cameraPreview: { + aspectRatio: 3 / 4, + width: '100%', }, bottomButtons: { - bottom: 0, - left: 0, - right: 0, + margin: 10, + flexDirection: 'row', + alignItems: 'center', }, - bottomButtonsInner: { - paddingVertical: 10, + backBtnContainer: { + flex: 1, + alignItems: 'flex-start', }, - backBtn: { - position: 'absolute', - left: 10, - top: 0, - bottom: 0, - justifyContent: 'center', - zIndex: 10, + backTextStyle: { padding: 10, + color: 'white', + fontSize: 20, }, captureButtonContainer: { + flex: 1, justifyContent: 'center', alignItems: 'center', - zIndex: 9, - }, - rightBottomArea: { - position: 'absolute', - right: 20, - top: 0, - bottom: 0, - zIndex: 10, - alignItems: 'center', - justifyContent: 'center', - }, - textStyle: { - color: 'white', - fontSize: 20, }, textNumberContainer: { position: 'absolute', @@ -275,13 +227,15 @@ const 
styles = StyleSheet.create({ justifyContent: 'center', alignItems: 'center', }, - gap: { - flex: 10, - flexDirection: 'column', + thumbnailContainer: { + flex: 1, + alignItems: 'flex-end', + justifyContent: 'center', }, - preview: { + thumbnail: { width: 48, height: 48, borderRadius: 4, + marginEnd: 10, }, }); From 478a68b23e63646ea6fa90cda604f63f1b5f6070 Mon Sep 17 00:00:00 2001 From: Seph Soliman Date: Mon, 10 Jul 2023 12:18:35 -0700 Subject: [PATCH 20/20] Fixed scan frame dimensions on Android --- .../src/main/java/com/rncamerakit/CKCamera.kt | 2 +- .../com/rncamerakit/barcode/BarcodeFrame.kt | 26 +++++++++-------- example/package.json | 3 +- example/src/BarcodeScreenExample.tsx | 25 ++++++++++++++--- example/src/CameraExample.tsx | 28 ++++++++++++------- 5 files changed, 57 insertions(+), 27 deletions(-) diff --git a/android/src/main/java/com/rncamerakit/CKCamera.kt b/android/src/main/java/com/rncamerakit/CKCamera.kt index 294df4f1f2..b0ad32d59e 100644 --- a/android/src/main/java/com/rncamerakit/CKCamera.kt +++ b/android/src/main/java/com/rncamerakit/CKCamera.kt @@ -501,7 +501,7 @@ class CKCamera(context: ThemedReactContext) : FrameLayout(context), LifecycleObs val height: Int = convertDeviceHeightToSupportedAspectRatio(actualPreviewWidth, actualPreviewHeight) barcodeFrame!!.setFrameColor(frameColor) barcodeFrame!!.setLaserColor(laserColor) - (barcodeFrame as View).layout(0, 0, actualPreviewWidth, height) + (barcodeFrame as View).layout(0, 0, this.effectLayer.width, this.effectLayer.height) addView(barcodeFrame) } else if (barcodeFrame != null) { removeView(barcodeFrame) diff --git a/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt b/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt index 80f3aa1e9f..ae694a839c 100644 --- a/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt +++ b/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt @@ -6,6 +6,8 @@ import android.view.View import androidx.annotation.ColorInt import com.rncamerakit.R +import kotlin.math.max +import kotlin.math.min class BarcodeFrame(context: Context) : View(context) { private var borderPaint: Paint = Paint() @@ -29,14 +31,18 @@ class BarcodeFrame(context: Context) : View(context) { override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) { super.onMeasure(widthMeasureSpec, heightMeasureSpec) - frameWidth = measuredWidth - frameHeight = measuredHeight - val marginWidth = width / WIDTH_SCALE - val marginHeight = (height / HEIGHT_SCALE).toInt() - frameRect.left = marginWidth - frameRect.right = width - marginWidth - frameRect.top = marginHeight - frameRect.bottom = height - marginHeight + val marginHeight = 40 + val marginWidth = 40 + val frameMaxWidth = 1200 + val frameMaxHeight = 600 + val frameMinWidth = 100 + val frameMinHeight = 100 + frameWidth = max(frameMinWidth, min(frameMaxWidth, measuredWidth - (marginWidth * 2))) + frameHeight = max(frameMinHeight, min(frameMaxHeight, measuredHeight - (marginHeight * 2))) + frameRect.left = (measuredWidth / 2) - (frameWidth / 2) + frameRect.right = (measuredWidth / 2) + (frameWidth / 2) + frameRect.top = (measuredHeight / 2) - (frameHeight / 2) + frameRect.bottom = (measuredHeight / 2) + (frameHeight / 2) } override fun onDraw(canvas: Canvas) { @@ -75,9 +81,7 @@ class BarcodeFrame(context: Context) : View(context) { companion object { private const val STROKE_WIDTH = 5 - private const val ANIMATION_SPEED = 8 - private const val WIDTH_SCALE = 7 - private const val HEIGHT_SCALE = 2.75 + private const val ANIMATION_SPEED = 4 
} init { diff --git a/example/package.json b/example/package.json index 4717fec48d..b0ad59e32a 100644 --- a/example/package.json +++ b/example/package.json @@ -6,7 +6,8 @@ "scripts": { "android": "react-native run-android", "ios": "react-native run-ios", - "start": "react-native start" + "start": "react-native start", + "reverse": "adb reverse tcp:8081 tcp:8081" }, "dependencies": { "lodash": "^4.17.20", diff --git a/example/src/BarcodeScreenExample.tsx b/example/src/BarcodeScreenExample.tsx index 0da9ee25f2..0b9de8c572 100644 --- a/example/src/BarcodeScreenExample.tsx +++ b/example/src/BarcodeScreenExample.tsx @@ -13,8 +13,7 @@ import { } from 'react-native'; import Camera from '../../src/Camera'; import { CameraApi, CameraType, CaptureData } from '../../src/types'; - -const { width, height } = Dimensions.get('window'); +import { Orientation } from '../../src'; const flashImages = { on: require('../images/flashOn.png'), @@ -131,7 +130,26 @@ const BarcodeExample = ({ onBack }: { onBack: () => void }) => { focusMode="on" torchMode={torchMode ? 'on' : 'off'} onOrientationChange={(e) => { - console.log('orientationChange', e.nativeEvent); + // We recommend locking the camera UI to portrait (using a different library) + // and rotating the UI elements counter to the orientation + // However, we include onOrientationChange so you can match your UI to what the camera does + switch(e.nativeEvent.orientation) { + case Orientation.LANDSCAPE_LEFT: + console.log('orientationChange', 'LANDSCAPE_LEFT'); + break; + case Orientation.LANDSCAPE_RIGHT: + console.log('orientationChange', 'LANDSCAPE_RIGHT'); + break; + case Orientation.PORTRAIT: + console.log('orientationChange', 'PORTRAIT'); + break; + case Orientation.PORTRAIT_UPSIDE_DOWN: + console.log('orientationChange', 'PORTRAIT_UPSIDE_DOWN'); + break; + default: + console.log('orientationChange', e.nativeEvent); + break; + } }} // ratioOverlay={ratios[ratioArrayPosition]} laserColor="red" @@ -208,7 +226,6 @@ const styles = StyleSheet.create({ aspectRatio: 3 / 4, width: '100%', }, - bottomButtons: { margin: 10, flexDirection: 'row', diff --git a/example/src/CameraExample.tsx b/example/src/CameraExample.tsx index dd7964fda9..5745f565ce 100644 --- a/example/src/CameraExample.tsx +++ b/example/src/CameraExample.tsx @@ -1,11 +1,9 @@ import React, { useState, useRef } from 'react'; -import { StyleSheet, Text, View, TouchableOpacity, Image, Dimensions, SafeAreaView } from 'react-native'; +import { StyleSheet, Text, View, TouchableOpacity, Image, SafeAreaView } from 'react-native'; import Camera from '../../src/Camera'; import { CameraApi, CameraType, CaptureData } from '../../src/types'; import { Orientation } from '../../src'; -const { width, height } = Dimensions.get('window'); - const flashImages = { on: require('../images/flashOn.png'), off: require('../images/flashOff.png'), @@ -127,10 +125,23 @@ const CameraExample = ({ onBack }: { onBack: () => void }) => { // We recommend locking the camera UI to portrait (using a different library) // and rotating the UI elements counter to the orientation // However, we include onOrientationChange so you can match your UI to what the camera does - const isLandscape = [Orientation.LANDSCAPE_LEFT, Orientation.LANDSCAPE_RIGHT].includes( - e.nativeEvent.orientation, - ); - console.log('orientationChange', isLandscape ? 
'landscape' : 'portrait'); + switch(e.nativeEvent.orientation) { + case Orientation.LANDSCAPE_LEFT: + console.log('orientationChange', 'LANDSCAPE_LEFT'); + break; + case Orientation.LANDSCAPE_RIGHT: + console.log('orientationChange', 'LANDSCAPE_RIGHT'); + break; + case Orientation.PORTRAIT: + console.log('orientationChange', 'PORTRAIT'); + break; + case Orientation.PORTRAIT_UPSIDE_DOWN: + console.log('orientationChange', 'PORTRAIT_UPSIDE_DOWN'); + break; + default: + console.log('orientationChange', e.nativeEvent); + break; + } }} /> )} @@ -179,7 +190,6 @@ const styles = StyleSheet.create({ height: '100%', backgroundColor: 'black', }, - topButtons: { margin: 10, zIndex: 10, @@ -189,7 +199,6 @@ const styles = StyleSheet.create({ topButton: { padding: 10, }, - cameraContainer: { justifyContent: 'center', flex: 1, @@ -198,7 +207,6 @@ const styles = StyleSheet.create({ aspectRatio: 3 / 4, width: '100%', }, - bottomButtons: { margin: 10, flexDirection: 'row',
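
A note on the `setTorchMode` removal in [PATCH 18/20]: torch control is now driven entirely by the `torchMode` prop, which works on both platforms. A minimal migration sketch, assuming the example app's relative imports and the `'on' | 'off'` values used in the example screens; the component and state names here are illustrative only:

```tsx
import React, { useRef, useState } from 'react';
import { Text, TouchableOpacity } from 'react-native';
import Camera from '../../src/Camera';
import { CameraApi, CameraType } from '../../src/types';

const TorchToggleExample = () => {
  const cameraRef = useRef<CameraApi>(null);
  const [torchOn, setTorchOn] = useState(false);

  return (
    <>
      {/* Before this patch (Android only): cameraRef.current?.setTorchMode('on') */}
      {/* After: flip local state and let the prop re-render the native view */}
      <Camera ref={cameraRef} style={{ flex: 1 }} cameraType={CameraType.Back} torchMode={torchOn ? 'on' : 'off'} />
      <TouchableOpacity onPress={() => setTorchOn(!torchOn)}>
        <Text>Toggle torch</Text>
      </TouchableOpacity>
    </>
  );
};

export default TorchToggleExample;
```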
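The `// we must use the general module` comment in `Camera.android.tsx` is worth spelling out: on Android, imperative calls go through a native module that resolves the target view from its React tag (see `uiManager?.resolveView(viewTag)` in `RNCameraKitModule.kt` above), so the JS side always passes `findNodeHandle(...)` along. A sketch of the pattern, assuming `RNCameraKitModule` is exposed through `NativeModules` under that name:

```tsx
import React from 'react';
import { NativeModules, findNodeHandle } from 'react-native';

const { RNCameraKitModule } = NativeModules;

// nativeRef points at the native camera view inside the forwardRef component.
// The module resolves the view from this tag on the UI thread, so the handle
// must accompany every imperative call.
const capture = async (options: Record<string, unknown>, nativeRef: React.RefObject<any>) => {
  return await RNCameraKitModule.capture(options, findNodeHandle(nativeRef.current ?? null));
};
```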
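Both example screens now document the same recommendation for `onOrientationChange`: keep the screen locked to portrait and counter-rotate individual controls. One way to consume the event, sketched with assumed rotation angles — the sign convention depends on the platform and on how the UI is locked, so treat the mapping below as illustrative:

```tsx
import { Orientation } from '../../src';

// Hypothetical helper: map the reported orientation to a transform that keeps
// icons upright while the screen itself stays in portrait.
const uprightRotation = (orientation: Orientation): string => {
  switch (orientation) {
    case Orientation.LANDSCAPE_LEFT:
      return '90deg';
    case Orientation.LANDSCAPE_RIGHT:
      return '-90deg';
    case Orientation.PORTRAIT_UPSIDE_DOWN:
      return '180deg';
    default:
      return '0deg';
  }
};

// Usage: <Image style={{ transform: [{ rotate: uprightRotation(orientation) }] }} />
```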
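On the [PATCH 20/20] fix: the old `BarcodeFrame.onMeasure` derived the scan frame from proportional margins (`width / WIDTH_SCALE`, `height / HEIGHT_SCALE`), so the frame scaled with the preview and could end up oversized on large screens. The new code centers a rect clamped between fixed minimum and maximum sizes. Restated in TypeScript purely to make the arithmetic easy to check, with constants copied from the Kotlin above:

```ts
const MARGIN = 40;       // px kept clear on each edge before clamping
const MAX_W = 1200, MAX_H = 600;
const MIN_W = 100, MIN_H = 100;

// Mirrors the new onMeasure(): clamp the frame to the measured view, then center it.
function scanFrameRect(measuredWidth: number, measuredHeight: number) {
  const frameWidth = Math.max(MIN_W, Math.min(MAX_W, measuredWidth - MARGIN * 2));
  const frameHeight = Math.max(MIN_H, Math.min(MAX_H, measuredHeight - MARGIN * 2));
  return {
    left: measuredWidth / 2 - frameWidth / 2,
    right: measuredWidth / 2 + frameWidth / 2,
    top: measuredHeight / 2 - frameHeight / 2,
    bottom: measuredHeight / 2 + frameHeight / 2,
  };
}

// e.g. a 1080x1920 view yields a 1000x600 frame centered in the preview.
```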