diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 7cac1a5313..432ee6f8b7 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -12,18 +12,18 @@ jobs:
       - name: Bootstrap
         run: yarn bootstrap
       - name: Build
-        run: cd example/ios && xcodebuild -workspace CameraKitExample.xcworkspace -configuration Debug -scheme CameraKitExample -arch x86_64
+        run: cd example/ios && xcodebuild -workspace CameraKitExample.xcworkspace -configuration Debug -scheme CameraKitExample -sdk iphoneos build CODE_SIGN_IDENTITY="" CODE_SIGNING_REQUIRED=NO
   build-example-android:
-    name: build-example-android
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Validate Gradle Wrapper
-        uses: gradle/wrapper-validation-action@v1
-      - name: Install modules
-        run: yarn
-      - name: Bootstrap
-        run: yarn bootstrap
-      - name: Build
-        run: cd example/android && ./gradlew assembleDebug
+    name: build-example-android
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Validate Gradle Wrapper
+        uses: gradle/wrapper-validation-action@v1
+      - name: Install modules
+        run: yarn
+      - name: Bootstrap
+        run: yarn bootstrap
+      - name: Build
+        run: cd example/android && ./gradlew assembleDebug
diff --git a/.prettierrc.js b/.prettierrc.js
index af6b94eeb4..3e4729ab77 100644
--- a/.prettierrc.js
+++ b/.prettierrc.js
@@ -7,7 +7,7 @@ module.exports = {
   printWidth: 120,
   semi: true,
   jsxBracketSameLine: false,
-  jsxSingleQuote: true,
+  jsxSingleQuote: false,
   arrowParens: 'always',
 };
\ No newline at end of file
diff --git a/README.md b/README.md
index e45fab7770..726764e5b1 100644
--- a/README.md
+++ b/README.md
@@ -74,43 +74,6 @@ Add the following usage descriptions to your `Info.plist` (usually found at: `io
 
 ## Components
 
-### CameraScreen
-
-Full screen camera component that holds camera state and provides common camera controls. Works for most needs
-
-```ts
-import { CameraScreen } from 'react-native-camera-kit';
-```
-
-```tsx
-<CameraScreen
-  ref={(ref) => (this.camera = ref)}
-  actions={{ rightButtonText: 'Done', leftButtonText: 'Cancel' }}
-  onBottomButtonPressed={(event) => this.onBottomButtonPressed(event)}
-  flashImages={{
-    // optional, images for flash state button
-    on: require('path/to/image'),
-    off: require('path/to/image'),
-    auto: require('path/to/image'),
-  }}
-  flashImageStyle={} // optional, ImageStyle applied to flashImages
-  cameraFlipImage={require('path/to/image')} // optional, image for flipping camera button
-  cameraFlipImageStyle={} // optional, ImageStyle applied to cameraFlipImage
-  captureButtonImage={require('path/to/image')} // optional, image capture button
-  captureButtonImageStyle={} // optional, ImageStyle applied to captureButtonImage
-  torchOnImage={require('path/to/image')} // optional, image for toggling on flash light
-  torchOffImage={require('path/to/image')} // optional, image for toggling off flash light
-  torchImageStyle={} // optional, ImageStyle applied to torchImage
-  hideControls={false} // (default false) optional, hides camera controls
-  showCapturedImageCount={false} // (default false) optional, show count for photos taken during that capture session
-  cameraRatioOverlay // optional
-  allowCaptureRetake={false} // (default false) optional, ask for picture validation
-/>
-```
-
 ### Camera
 
 Barebones camera component if you need advanced/customized interface
 
 ```ts
 import { Camera, CameraType } from 'react-native-camera-kit';
 ```
 
 ```tsx
 <Camera
   ref={(ref) => (this.camera = ref)}
   cameraType={CameraType.Back} // front/back(default)
 />
 ```
 
@@ -129,7 +92,7 @@
 #### Barcode / QR Code Scanning
 
-Additionally, the Camera / CameraScreen can be used for barcode scanning
+Additionally, the Camera can be used for barcode scanning
 
 ```tsx
 <Camera
   ...
   scanBarcode
   onReadCode={(event) => console.log(event.nativeEvent.codeStringValue)}
 />
 ```
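An illustrative aside, not part of this patch: a minimal sketch of the barcode-scanning usage the README section above describes, also exercising the `scanThrottleDelay` prop this PR adds to the props table below. The event typing and inline styles are assumptions made for the sketch, not documented library API.

```tsx
// Sketch only: wiring up the Camera barcode scanner described above.
import React, { useState } from 'react';
import { Text, View } from 'react-native';
import { Camera, CameraType } from 'react-native-camera-kit';

const BarcodeScannerSketch = () => {
  const [code, setCode] = useState('');
  return (
    <View style={{ flex: 1 }}>
      <Camera
        style={{ flex: 1 }}
        cameraType={CameraType.Back}
        scanBarcode
        showFrame // overlay a scanning frame on the preview
        laserColor="red"
        frameColor="white"
        scanThrottleDelay={2000} // per the props table below: at most one scan event per 2000 ms
        onReadCode={(event: any) => setCode(event.nativeEvent.codeStringValue)}
      />
      <Text style={{ color: 'white' }}>{code}</Text>
    </View>
  );
};

export default BarcodeScannerSketch;
```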
-### CameraScreen Props (Optional)
-
-| Props                     | Type                                 | Description                                                                                 |
-| ------------------------- | ------------------------------------ | ------------------------------------------------------------------------------------------- |
-| All Camera Props          |                                      |                                                                                             |
-| `actions`                 | Actions                              | Labels for the buttons visible on screen                                                    |
-| `onBottomButtonPressed`   | `(event: BottomPressedData) => void` | Callback called when a button is pressed. `BottomPressedData` contains the data to consume  |
-| `flashImages`             | FlashImages                          | Images for flash state button. Default: none, button is hidden                              |
-| `flashImageStyle`         | ImageStyle                           | ImageStyle applied to flashImages                                                           |
-| `cameraFlipImage`         | ImageSourcePropType                  | Image for flipping camera button. Default: none, button is hidden                           |
-| `cameraFlipImageStyle`    | ImageStyle                           | ImageStyle applied to cameraFlipImage                                                       |
-| `captureButtonImage`      | ImageSourcePropType                  | Image for capture button. Default: none, button is hidden                                   |
-| `captureButtonImageStyle` | ImageStyle                           | ImageStyle applied to captureButtonImage                                                    |
-| `torchOnImage`            | ImageSourcePropType                  | Image for toggling on flash light. Default: none, button is hidden                          |
-| `torchOffImage`           | ImageSourcePropType                  | Image for toggling off flash light. Default: none, button is hidden                         |
-| `torchImageStyle`         | ImageStyle                           | ImageStyle applied to torchOnImage/torchOffImage                                            |
-| `hideControls`            | Boolean                              | Hides camera controls. Default: `false`                                                     |
-| `showCapturedImageCount`  | Boolean                              | Show count for photos taken during that capture session. Default: `false`                   |
-| `cameraRatioOverlay`      | CameraRatioOverlay                   |                                                                                             |
-| `allowCaptureRetake`      | Boolean                              | Ask for picture validation. Default: `false`                                                |
-
 ### Camera Props (Optional)
 
 | Props | Type | Description |
@@ -177,10 +119,11 @@
 | `cameraType` | CameraType.Back/CameraType.Front | Choose what camera to use. Default: `CameraType.Back` |
 | `onOrientationChange` | Function | Callback when physical device orientation changes. Returned event contains `orientation`. Ex: `onOrientationChange={(event) => console.log(event.nativeEvent.orientation)}`. Use `import { Orientation } from 'react-native-camera-kit'; if (event.nativeEvent.orientation === Orientation.PORTRAIT) { ... }` to understand the new value |
 | **iOS only** |
-| `ratioOverlay` | `['int:int', ...]` | Show a guiding overlay in the camera preview for the selected ratio. Does not crop image as of v9.0. Example: `['16:9', '1:1', '3:4']` |
+| `ratioOverlay` | `'int:int'` | Show a guiding overlay in the camera preview for the selected ratio. Does not crop image as of v9.0. Example: `'16:9'` |
 | `ratioOverlayColor` | Color | Any color with alpha. Default: `'#ffffff77'` |
 | `resetFocusTimeout` | Number | Dismiss tap to focus after this many milliseconds. Default `0` (disabled). Example: `5000` is 5 seconds. |
 | `resetFocusWhenMotionDetected` | Boolean | Dismiss tap to focus when focus area content changes. Native iOS feature (see documentation: https://developer.apple.com/documentation/avfoundation/avcapturedevice/1624644-subjectareachangemonitoringenabl?language=objc). Default `true`. |
+| `scanThrottleDelay` | Number | Duration between scan detections in milliseconds. Default `2000` (2s) |
 | **Barcode only** |
 | `scanBarcode` | Boolean | Enable barcode scanner. Default: `false` |
 | `showFrame` | Boolean | Show frame in barcode scanner. Default: `false` |
diff --git a/ReactNativeCameraKit.podspec b/ReactNativeCameraKit.podspec
index 2beadd754c..f7fd54f419 100644
--- a/ReactNativeCameraKit.podspec
+++ b/ReactNativeCameraKit.podspec
@@ -13,7 +13,7 @@ Pod::Spec.new do |s|
   s.platform = :ios, "10.0"
   s.source = { :git => "https://github.com/teslamotors/react-native-camera-kit.git", :tag => "v#{s.version}" }
 
-  s.source_files = "ios/**/*.{h,m}"
+  s.source_files = "ios/**/*.{h,m,swift}"
 
   s.dependency 'React-Core'
 end
diff --git a/android/src/main/java/com/rncamerakit/CKCamera.kt b/android/src/main/java/com/rncamerakit/CKCamera.kt
index 294df4f1f2..b0ad32d59e 100644
--- a/android/src/main/java/com/rncamerakit/CKCamera.kt
+++ b/android/src/main/java/com/rncamerakit/CKCamera.kt
@@ -501,7 +501,7 @@ class CKCamera(context: ThemedReactContext) : FrameLayout(context), LifecycleObs
             val height: Int = convertDeviceHeightToSupportedAspectRatio(actualPreviewWidth, actualPreviewHeight)
             barcodeFrame!!.setFrameColor(frameColor)
             barcodeFrame!!.setLaserColor(laserColor)
-            (barcodeFrame as View).layout(0, 0, actualPreviewWidth, height)
+            (barcodeFrame as View).layout(0, 0, this.effectLayer.width, this.effectLayer.height)
             addView(barcodeFrame)
         } else if (barcodeFrame != null) {
             removeView(barcodeFrame)
diff --git a/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt b/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt
index bda8ddbd66..be8ad9a6a2 100644
--- a/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt
+++ b/android/src/main/java/com/rncamerakit/RNCameraKitModule.kt
@@ -37,15 +37,4 @@ class RNCameraKitModule(private val reactContext: ReactApplicationContext) : Rea
             view.capture(options.toHashMap(), promise)
         }
     }
-
-    @ReactMethod
-    fun setTorchMode( mode: String, viewTag: Int) {
-        val context = reactContext
-        val uiManager = context.getNativeModule(UIManagerModule::class.java)
-        context.runOnUiQueueThread {
-            val view = uiManager?.resolveView(viewTag) as CKCamera
-            view.setTorchMode(mode)
-        }
-
-    }
 }
\ No newline at end of file
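An aside, not part of this patch: the module change above deletes the imperative `setTorchMode` native method on Android, so torch state is driven declaratively through the `torchMode` prop instead, as the rewritten example screens later in this diff do. A minimal sketch, mirroring the examples' `torchMode={torchMode ? 'on' : 'off'}` usage:

```tsx
// Sketch only: toggling the torch via the declarative torchMode prop
// rather than the removed setTorchMode() native call.
import React, { useState } from 'react';
import { Button, View } from 'react-native';
import { Camera } from 'react-native-camera-kit';

const TorchSketch = () => {
  const [torchOn, setTorchOn] = useState(false);
  return (
    <View style={{ flex: 1 }}>
      <Camera style={{ flex: 1 }} torchMode={torchOn ? 'on' : 'off'} />
      <Button title="Toggle torch" onPress={() => setTorchOn(!torchOn)} />
    </View>
  );
};

export default TorchSketch;
```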
diff --git a/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt b/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt
index 80f3aa1e9f..ae694a839c 100644
--- a/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt
+++ b/android/src/main/java/com/rncamerakit/barcode/BarcodeFrame.kt
@@ -6,6 +6,8 @@ import android.view.View
 import androidx.annotation.ColorInt
 
 import com.rncamerakit.R
+import kotlin.math.max
+import kotlin.math.min
 
 class BarcodeFrame(context: Context) : View(context) {
     private var borderPaint: Paint = Paint()
@@ -29,14 +31,18 @@ class BarcodeFrame(context: Context) : View(context) {
 
     override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
         super.onMeasure(widthMeasureSpec, heightMeasureSpec)
-        frameWidth = measuredWidth
-        frameHeight = measuredHeight
-        val marginWidth = width / WIDTH_SCALE
-        val marginHeight = (height / HEIGHT_SCALE).toInt()
-        frameRect.left = marginWidth
-        frameRect.right = width - marginWidth
-        frameRect.top = marginHeight
-        frameRect.bottom = height - marginHeight
+        val marginHeight = 40
+        val marginWidth = 40
+        val frameMaxWidth = 1200
+        val frameMaxHeight = 600
+        val frameMinWidth = 100
+        val frameMinHeight = 100
+        frameWidth = max(frameMinWidth, min(frameMaxWidth, measuredWidth - (marginWidth * 2)))
+        frameHeight = max(frameMinHeight, min(frameMaxHeight, measuredHeight - (marginHeight * 2)))
+        frameRect.left = (measuredWidth / 2) - (frameWidth / 2)
+        frameRect.right = (measuredWidth / 2) + (frameWidth / 2)
+        frameRect.top = (measuredHeight / 2) - (frameHeight / 2)
+        frameRect.bottom = (measuredHeight / 2) + (frameHeight / 2)
     }
 
     override fun onDraw(canvas: Canvas) {
@@ -75,9 +81,7 @@ class BarcodeFrame(context: Context) : View(context) {
 
     companion object {
         private const val STROKE_WIDTH = 5
-        private const val ANIMATION_SPEED = 8
-        private const val WIDTH_SCALE = 7
-        private const val HEIGHT_SCALE = 2.75
+        private const val ANIMATION_SPEED = 4
     }
 
     init {
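An aside, not part of this patch: the `onMeasure` rewrite above replaces the ratio-derived margins with a scanner frame that is clamped between minimum and maximum dimensions and centered in the measured view. The same arithmetic as a TypeScript sketch (names illustrative, constants copied from the Kotlin change):

```ts
// Sketch of the centered, clamped scanner-frame geometry introduced above.
const MARGIN = 40;
const clamp = (value: number, min: number, max: number) => Math.max(min, Math.min(max, value));

function frameRect(measuredWidth: number, measuredHeight: number) {
  // Fill the view minus the margins, but never below 100px nor above 1200x600.
  const frameWidth = clamp(measuredWidth - MARGIN * 2, 100, 1200);
  const frameHeight = clamp(measuredHeight - MARGIN * 2, 100, 600);
  // Center the frame in the measured view.
  return {
    left: measuredWidth / 2 - frameWidth / 2,
    right: measuredWidth / 2 + frameWidth / 2,
    top: measuredHeight / 2 - frameHeight / 2,
    bottom: measuredHeight / 2 + frameHeight / 2,
  };
}
```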
diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml
index 4122f36a59..560d305f9e 100644
--- a/example/android/app/src/main/AndroidManifest.xml
+++ b/example/android/app/src/main/AndroidManifest.xml
@@ -2,6 +2,7 @@
+
diff --git a/example/src/App.tsx b/example/src/App.tsx
--- a/example/src/App.tsx
+++ b/example/src/App.tsx
+const App = () => {
+  const [example, setExample] = useState();
-export default class App extends Component {
-  state: State;
-
-  constructor(props) {
-    super(props);
-    this.state = {
-      example: undefined,
-    };
+  if (example) {
+    return example;
   }
-
-  render() {
-    if (this.state.example) {
-      const Example = this.state.example;
-      return <Example />;
-    }
-    return (
-      <View style={styles.container}>
-        <Text>🎈</Text>
-        <Text>React Native Camera Kit</Text>
-        <TouchableOpacity onPress={() => this.setState({ example: CameraExample })}>
-          <Text>Camera</Text>
-        </TouchableOpacity>
-        <TouchableOpacity onPress={() => this.setState({ example: CameraScreenExample })}>
-          <Text>Camera Screen</Text>
-        </TouchableOpacity>
-        <TouchableOpacity onPress={() => this.setState({ example: BarcodeScreenExample })}>
-          <Text>Barcode Scanner</Text>
-        </TouchableOpacity>
-      </View>
-    );
-  }
-}
+  const onBack = () => setExample(undefined);
+
+  return (
+    <View style={styles.container}>
+      <Text>🎈</Text>
+      <Text>React Native Camera Kit</Text>
+      <TouchableOpacity onPress={() => setExample(<CameraExample onBack={onBack} />)}>
+        <Text>Camera</Text>
+      </TouchableOpacity>
+      <TouchableOpacity onPress={() => setExample(<BarcodeScreenExample onBack={onBack} />)}>
+        <Text>Barcode Scanner</Text>
+      </TouchableOpacity>
+    </View>
+  );
+};
+
+export default App;
 
 const styles = StyleSheet.create({
   container: {
diff --git a/example/src/BarcodeScreenExample.tsx b/example/src/BarcodeScreenExample.tsx
index bd08eab611..0b9de8c572 100644
--- a/example/src/BarcodeScreenExample.tsx
+++ b/example/src/BarcodeScreenExample.tsx
@@ -1,50 +1,262 @@
-import React, { Component } from 'react';
-import { Alert } from 'react-native';
-import CameraScreen from '../../src/CameraScreen';
-import CheckingScreen from './CheckingScreen';
-
-export default class BarcodeScreenExample extends Component {
-  constructor(props) {
-    super(props);
-    this.state = {
-      example: undefined,
-      value: undefined,
+import React, { useState, useRef, useEffect } from 'react';
+import {
+  StyleSheet,
+  Text,
+  View,
+  TouchableOpacity,
+  Image,
+  Dimensions,
+  Platform,
+  SafeAreaView,
+  useWindowDimensions,
+  Vibration,
+} from 'react-native';
+import Camera from '../../src/Camera';
+import { CameraApi, CameraType, CaptureData } from '../../src/types';
+import { Orientation } from '../../src';
+
+const flashImages = {
+  on: require('../images/flashOn.png'),
+  off: require('../images/flashOff.png'),
+  auto: require('../images/flashAuto.png'),
+};
+
+const flashArray = [
+  {
+    mode: 'auto',
+    image: flashImages.auto,
+  },
+  {
+    mode: 'on',
+    image: flashImages.on,
+  },
+  {
+    mode: 'off',
+    image: flashImages.off,
+  },
+] as const;
+
+const BarcodeExample = ({ onBack }: { onBack: () => void }) => {
+  const cameraRef = useRef<CameraApi>(null);
+  const [currentFlashArrayPosition, setCurrentFlashArrayPosition] = useState(0);
+  const [captureImages, setCaptureImages] = useState<CaptureData[]>([]);
+  const [flashData, setFlashData] = useState(flashArray[currentFlashArrayPosition]);
+  const [torchMode, setTorchMode] = useState(false);
+  // const [ratios, setRatios] = useState([]);
+  // const [ratioArrayPosition, setRatioArrayPosition] = useState(-1);
+  const [captured, setCaptured] = useState(false);
+  const [cameraType, setCameraType] = useState(CameraType.Back);
+  const [barcode, setBarcode] = useState('');
+
+  useEffect(() => {
+    const t = setTimeout(() => {
+      setBarcode('');
+    }, 2000);
+    return () => {
+      clearTimeout(t);
     };
-  }
-
-  onBottomButtonPressed(event) {
-    const captureImages = JSON.stringify(event.captureImages);
-    Alert.alert(
-      `"${event.type}" Button Pressed`,
-      `${captureImages}`,
-      [{ text: 'OK', onPress: () => console.log('OK Pressed') }],
-      { cancelable: false },
-    );
-  }
-
-  render() {
-    if (this.state.example) {
-      const Screen = this.state.example;
-      return <Screen value={this.state.value} />;
-    }
-    return (
-      <CameraScreen
-        actions={{ rightButtonText: 'Done', leftButtonText: 'Cancel' }}
-        onBottomButtonPressed={(event) => this.onBottomButtonPressed(event)}
-        flashImages={{
-          on: require('../images/flashOn.png'),
-          off: require('../images/flashOff.png'),
-          auto: require('../images/flashAuto.png'),
-        }}
-        scanBarcode
-        showFrame
-        laserColor="red"
-        frameColor="white"
-        onReadCode={(event) => {
-          this.setState({ example: CheckingScreen, value: event.nativeEvent.codeStringValue });
-        }}
-        hideControls
-      />
-    );
-  }
-}
+  }, [barcode]);
+
+  // useEffect(() => {
+  //   let updatedRatios = [...ratios];
+  //   if (props.cameraRatioOverlay) {
+  //     updatedRatios = updatedRatios.concat(props.cameraRatioOverlay.ratios || []);
+  //   }
+  //   setRatios(updatedRatios);
+  //   setRatioArrayPosition(updatedRatios.length > 0 ? 0 : -1);
+  // }, []);
+
+  const onSwitchCameraPressed = () => {
+    const direction = cameraType === CameraType.Back ? CameraType.Front : CameraType.Back;
+    setCameraType(direction);
+  };
+
+  const onSetFlash = () => {
+    const newPosition = (currentFlashArrayPosition + 1) % 3;
+    setCurrentFlashArrayPosition(newPosition);
+    setFlashData(flashArray[newPosition]);
+  };
+
+  const onSetTorch = () => {
+    setTorchMode(!torchMode);
+  };
+
+  const onCaptureImagePressed = async () => {
+    if (!cameraRef.current) return;
+    const image = await cameraRef.current.capture();
+    if (image) {
+      setCaptured(true);
+      setCaptureImages([...captureImages, image]);
+      console.log('image', image);
+    }
+  };
+
+  // const onRatioButtonPressed = () => {
+  //   const newPosition = (ratioArrayPosition + 1) % ratios.length;
+  //   setRatioArrayPosition(newPosition);
+  // };
+
+  const window = useWindowDimensions();
+  const cameraRatio = 4 / 3;
+
+  return (
+    <View style={styles.screen}>
+      <SafeAreaView style={styles.topButtons}>
+        {flashData.image && (
+          <TouchableOpacity style={styles.topButton} onPress={onSetFlash}>
+            <Image source={flashData.image} resizeMode="contain" />
+          </TouchableOpacity>
+        )}
+        <TouchableOpacity style={styles.topButton} onPress={onSwitchCameraPressed}>
+          <Image source={require('../images/cameraFlipIcon.png')} resizeMode="contain" />
+        </TouchableOpacity>
+        <TouchableOpacity style={styles.topButton} onPress={onSetTorch}>
+          <Image source={torchMode ? require('../images/torchOn.png') : require('../images/torchOff.png')} resizeMode="contain" />
+        </TouchableOpacity>
+      </SafeAreaView>
+
+      <View style={styles.cameraContainer}>
+        <Camera
+          ref={cameraRef}
+          style={styles.cameraPreview}
+          cameraType={cameraType}
+          flashMode={flashData?.mode}
+          zoomMode="on"
+          focusMode="on"
+          torchMode={torchMode ? 'on' : 'off'}
+          onOrientationChange={(e) => {
+            // We recommend locking the camera UI to portrait (using a different library)
+            // and rotating the UI elements counter to the orientation
+            // However, we include onOrientationChange so you can match your UI to what the camera does
+            switch (e.nativeEvent.orientation) {
+              case Orientation.LANDSCAPE_LEFT:
+                console.log('orientationChange', 'LANDSCAPE_LEFT');
+                break;
+              case Orientation.LANDSCAPE_RIGHT:
+                console.log('orientationChange', 'LANDSCAPE_RIGHT');
+                break;
+              case Orientation.PORTRAIT:
+                console.log('orientationChange', 'PORTRAIT');
+                break;
+              case Orientation.PORTRAIT_UPSIDE_DOWN:
+                console.log('orientationChange', 'PORTRAIT_UPSIDE_DOWN');
+                break;
+              default:
+                console.log('orientationChange', e.nativeEvent);
+                break;
+            }
+          }}
+          // ratioOverlay={ratios[ratioArrayPosition]}
+          laserColor="red"
+          frameColor="white"
+          scanBarcode
+          showFrame
+          onReadCode={(event) => {
+            Vibration.vibrate(100);
+            setBarcode(event.nativeEvent.codeStringValue);
+            console.log('barcode', event.nativeEvent.codeStringValue);
+          }}
+        />
+      </View>
+
+      {/* {ratios.length > 0 && (
+        <View style={{ flexDirection: 'column', justifyContent: 'flex-end' }}>
+          <Text>Your images look best at a {ratios[0] || ''} ratio</Text>
+          <TouchableOpacity
+            onPress={() => onRatioButtonPressed()}
+          >
+            <Text>{ratios[ratioArrayPosition]}</Text>
+          </TouchableOpacity>
+        </View>
+      )} */}
+
+      <SafeAreaView>
+        <View style={styles.bottomButtons}>
+          <View style={styles.backBtnContainer}>
+            <TouchableOpacity onPress={onBack}>
+              <Text style={styles.textStyle}>Back</Text>
+            </TouchableOpacity>
+          </View>
+
+          <View style={styles.captureButtonContainer}>
+            <TouchableOpacity onPress={onCaptureImagePressed}>
+              <Image source={require('../images/cameraButton.png')} />
+            </TouchableOpacity>
+          </View>
+
+          <View style={styles.barcodeContainer}>
+            <Text numberOfLines={1} style={styles.textStyle}>{barcode}</Text>
+          </View>
+        </View>
+      </SafeAreaView>
+    </View>
+  );
+};
+
+export default BarcodeExample;
+
+const styles = StyleSheet.create({
+  screen: {
+    height: '100%',
+    backgroundColor: 'black',
+  },
+
+  topButtons: {
+    margin: 10,
+    zIndex: 10,
+    flexDirection: 'row',
+    justifyContent: 'space-between',
+  },
+  topButton: {
+    padding: 10,
+  },
+
+  cameraContainer: {
+    justifyContent: 'center',
+    flex: 1,
+  },
+  cameraPreview: {
+    aspectRatio: 3 / 4,
+    width: '100%',
+  },
+  bottomButtons: {
+    margin: 10,
+    flexDirection: 'row',
+    alignItems: 'center',
+  },
+  backBtnContainer: {
+    flex: 1,
+    alignItems: 'flex-start',
+  },
+  captureButtonContainer: {
+    flex: 1,
+    justifyContent: 'center',
+    alignItems: 'center',
+  },
+  textNumberContainer: {
+    position: 'absolute',
+    top: 0,
+    left: 0,
+    bottom: 0,
+    right: 0,
+    justifyContent: 'center',
+    alignItems: 'center',
+  },
+  barcodeContainer: {
+    flex: 1,
+    alignItems: 'flex-end',
+    justifyContent: 'center',
+  },
+  textStyle: {
+    padding: 10,
+    color: 'white',
+    fontSize: 20,
+  },
+});
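An aside, not part of this patch: both rewritten example screens drive still capture through a ref, awaiting `capture()` and guarding against overlapping calls. A condensed sketch; the ref is typed loosely here because the examples import `CameraApi` from the library sources rather than from a public export shown in this diff.

```tsx
// Sketch only: ref-based still capture, serialized the way CameraExample below does it.
import React, { useRef } from 'react';
import { Button, View } from 'react-native';
import { Camera } from 'react-native-camera-kit';

const CaptureSketch = () => {
  const cameraRef = useRef<any>(null); // CameraApi in the example sources
  const isCapturing = useRef(false);

  const takePhoto = async () => {
    // iOS can error out if captures overlap, so block until the current capture finishes.
    if (!cameraRef.current || isCapturing.current) return;
    try {
      isCapturing.current = true;
      const image = await cameraRef.current.capture(); // resolves with { uri, name, size, ... }
      console.log('captured', image?.uri);
    } finally {
      isCapturing.current = false;
    }
  };

  return (
    <View style={{ flex: 1 }}>
      <Camera ref={cameraRef} style={{ flex: 1 }} />
      <Button title="Capture" onPress={takePhoto} />
    </View>
  );
};

export default CaptureSketch;
```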
diff --git a/example/src/CameraExample.tsx b/example/src/CameraExample.tsx
index 290b352c69..5745f565ce 100644
--- a/example/src/CameraExample.tsx
+++ b/example/src/CameraExample.tsx
@@ -1,42 +1,249 @@
-import React, { Component } from 'react';
-import { View, StyleSheet } from 'react-native';
+import React, { useState, useRef } from 'react';
+import { StyleSheet, Text, View, TouchableOpacity, Image, SafeAreaView } from 'react-native';
 import Camera from '../../src/Camera';
-import { CameraType } from '../../src/types';
+import { CameraApi, CameraType, CaptureData } from '../../src/types';
+import { Orientation } from '../../src';
+
+const flashImages = {
+  on: require('../images/flashOn.png'),
+  off: require('../images/flashOff.png'),
+  auto: require('../images/flashAuto.png'),
+};
+
+const flashArray = [
+  {
+    mode: 'auto',
+    image: flashImages.auto,
+  },
+  {
+    mode: 'on',
+    image: flashImages.on,
+  },
+  {
+    mode: 'off',
+    image: flashImages.off,
+  },
+] as const;
+
+const CameraExample = ({ onBack }: { onBack: () => void }) => {
+  const cameraRef = useRef<CameraApi>(null);
+  const [currentFlashArrayPosition, setCurrentFlashArrayPosition] = useState(0);
+  const [captureImages, setCaptureImages] = useState<CaptureData[]>([]);
+  const [flashData, setFlashData] = useState(flashArray[currentFlashArrayPosition]);
+  const [torchMode, setTorchMode] = useState(false);
+  const [captured, setCaptured] = useState(false);
+  const [cameraType, setCameraType] = useState(CameraType.Back);
+  const [showImageUri, setShowImageUri] = useState('');
+
+  // iOS will error out if capturing too fast,
+  // so block capturing until the current capture is done
+  // This also minimizes issues of delayed capturing
+  const isCapturing = useRef(false);
+
+  const numberOfImagesTaken = () => {
+    const numberTook = captureImages.length;
+    if (numberTook >= 2) {
+      return numberTook;
+    } else if (captured) {
+      return '1';
+    } else {
+      return '';
+    }
+  };
+
+  const onSwitchCameraPressed = () => {
+    const direction = cameraType === CameraType.Back ? CameraType.Front : CameraType.Back;
+    setCameraType(direction);
+  };
+
+  const onSetFlash = () => {
+    const newPosition = (currentFlashArrayPosition + 1) % 3;
+    setCurrentFlashArrayPosition(newPosition);
+    setFlashData(flashArray[newPosition]);
+  };
+
+  const onSetTorch = () => {
+    setTorchMode(!torchMode);
+  };
+
+  const onCaptureImagePressed = async () => {
+    if (showImageUri) {
+      setShowImageUri('');
+      return;
+    }
+    if (!cameraRef.current || isCapturing.current) return;
+    let image: CaptureData | undefined;
+    try {
+      isCapturing.current = true;
+      image = await cameraRef.current.capture();
+    } catch (e) {
+      console.log('error', e);
+    } finally {
+      isCapturing.current = false;
+    }
+    if (!image) return;
+
+    setCaptured(true);
+    setCaptureImages([...captureImages, image]);
+    console.log('image', image);
+  };
+
+  return (
+    <View style={styles.screen}>
+      <SafeAreaView style={styles.topButtons}>
+        {flashData.image && (
+          <TouchableOpacity style={styles.topButton} onPress={onSetFlash}>
+            <Image source={flashData.image} resizeMode="contain" />
+          </TouchableOpacity>
+        )}
+        <TouchableOpacity style={styles.topButton} onPress={onSwitchCameraPressed}>
+          <Image source={require('../images/cameraFlipIcon.png')} resizeMode="contain" />
+        </TouchableOpacity>
+        <TouchableOpacity style={styles.topButton} onPress={onSetTorch}>
+          <Image source={torchMode ? require('../images/torchOn.png') : require('../images/torchOff.png')} resizeMode="contain" />
+        </TouchableOpacity>
+      </SafeAreaView>
+
-export default class CameraExample extends Component {
-  render() {
-    return (
-      <Camera
-        style={styles.cameraContainer}
-        onReadCode={(event) => console.log(event.nativeEvent.codeStringValue)}
-      />
-    );
-  }
-}
-
-const styles = StyleSheet.create(
-  {
-    cameraContainer: {
-      flex: 1,
-      backgroundColor: 'black',
-    },
-  },
-);
+      <View style={styles.cameraContainer}>
+        {showImageUri ? (
+          <Image source={{ uri: showImageUri }} style={styles.cameraPreview} resizeMode="contain" />
+        ) : (
+          <Camera
+            ref={cameraRef}
+            style={styles.cameraPreview}
+            cameraType={cameraType}
+            flashMode={flashData?.mode}
+            zoomMode="on"
+            focusMode="on"
+            torchMode={torchMode ? 'on' : 'off'}
+            onOrientationChange={(e) => {
+              // We recommend locking the camera UI to portrait (using a different library)
+              // and rotating the UI elements counter to the orientation
+              // However, we include onOrientationChange so you can match your UI to what the camera does
+              switch (e.nativeEvent.orientation) {
+                case Orientation.LANDSCAPE_LEFT:
+                  console.log('orientationChange', 'LANDSCAPE_LEFT');
+                  break;
+                case Orientation.LANDSCAPE_RIGHT:
+                  console.log('orientationChange', 'LANDSCAPE_RIGHT');
+                  break;
+                case Orientation.PORTRAIT:
+                  console.log('orientationChange', 'PORTRAIT');
+                  break;
+                case Orientation.PORTRAIT_UPSIDE_DOWN:
+                  console.log('orientationChange', 'PORTRAIT_UPSIDE_DOWN');
+                  break;
+                default:
+                  console.log('orientationChange', e.nativeEvent);
+                  break;
+              }
+            }}
+          />
+        )}
+      </View>
+
+      <SafeAreaView>
+        <View style={styles.bottomButtons}>
+          <View style={styles.backBtnContainer}>
+            <TouchableOpacity onPress={onBack}>
+              <Text style={styles.backTextStyle}>Back</Text>
+            </TouchableOpacity>
+          </View>
+
+          <View style={styles.captureButtonContainer}>
+            <TouchableOpacity onPress={onCaptureImagePressed}>
+              <Image source={require('../images/cameraButton.png')} />
+              <View style={styles.textNumberContainer}>
+                <Text>{numberOfImagesTaken()}</Text>
+              </View>
+            </TouchableOpacity>
+          </View>
+
+          <View style={styles.thumbnailContainer}>
+            {captureImages.length > 0 && (
+              <TouchableOpacity
+                onPress={() => {
+                  if (showImageUri) {
+                    setShowImageUri('');
+                  } else {
+                    setShowImageUri(captureImages[captureImages.length - 1].uri);
+                  }
+                }}
+              >
+                <Image source={{ uri: captureImages[captureImages.length - 1].uri }} style={styles.thumbnail} />
+              </TouchableOpacity>
+            )}
+          </View>
+        </View>
+      </SafeAreaView>
+    </View>
+  );
+};
+
+export default CameraExample;
+
+const styles = StyleSheet.create({
+  screen: {
+    height: '100%',
+    backgroundColor: 'black',
+  },
+  topButtons: {
+    margin: 10,
+    zIndex: 10,
+    flexDirection: 'row',
+    justifyContent: 'space-between',
+  },
+  topButton: {
+    padding: 10,
+  },
+  cameraContainer: {
+    justifyContent: 'center',
+    flex: 1,
+  },
+  cameraPreview: {
+    aspectRatio: 3 / 4,
+    width: '100%',
+  },
+  bottomButtons: {
+    margin: 10,
+    flexDirection: 'row',
+    alignItems: 'center',
+  },
+  backBtnContainer: {
+    flex: 1,
+    alignItems: 'flex-start',
+  },
+  backTextStyle: {
+    padding: 10,
+    color: 'white',
+    fontSize: 20,
+  },
+  captureButtonContainer: {
+    flex: 1,
+    justifyContent: 'center',
+    alignItems: 'center',
+  },
+  textNumberContainer: {
+    position: 'absolute',
+    top: 0,
+    left: 0,
+    bottom: 0,
+    right: 0,
+    justifyContent: 'center',
+    alignItems: 'center',
+  },
+  thumbnailContainer: {
+    flex: 1,
+    alignItems: 'flex-end',
+    justifyContent: 'center',
+  },
+  thumbnail: {
+    width: 48,
+    height: 48,
+    borderRadius: 4,
+    marginEnd: 10,
+  },
+});
diff --git a/example/src/CameraScreenExample.tsx b/example/src/CameraScreenExample.tsx
deleted file mode 100644
index 0ea9522cc9..0000000000
--- a/example/src/CameraScreenExample.tsx
+++ /dev/null
@@ -1,34 +0,0 @@
-import React, { Component } from 'react';
-import { Alert } from 'react-native';
-import CameraScreen from '../../src/CameraScreen';
-
-export default class CameraScreenExample extends Component {
-  onBottomButtonPressed(event) {
-    const captureImages = JSON.stringify(event.captureImages);
-    Alert.alert(
-      `"${event.type}" Button Pressed`,
-      `${captureImages}`,
-      [{ text: 'OK', onPress: () => console.log('OK Pressed') }],
-      { cancelable: false },
-    );
-  }
-
-  render() {
-    return (
-      <CameraScreen
-        actions={{ rightButtonText: 'Done', leftButtonText: 'Cancel' }}
-        onBottomButtonPressed={(event) => this.onBottomButtonPressed(event)}
-        flashImages={{
-          on: require('../images/flashOn.png'),
-          off: require('../images/flashOff.png'),
-          auto: require('../images/flashAuto.png'),
-        }}
-        cameraFlipImage={require('../images/cameraFlipIcon.png')}
-        captureButtonImage={require('../images/cameraButton.png')}
-        torchOnImage={require('../images/torchOn.png')}
-        torchOffImage={require('../images/torchOff.png')}
-        showCapturedImageCount
-      />
-    );
-  }
-}
diff --git a/example/src/CheckingScreen.tsx b/example/src/CheckingScreen.tsx
deleted file mode 100644
index a6e78bca1f..0000000000
--- a/example/src/CheckingScreen.tsx
+++ /dev/null
@@ -1,46 +0,0
@@ -import React, { Component } from 'react'; -import { View, TouchableOpacity, Text, StyleSheet } from 'react-native'; -import BarcodeScreen from './BarcodeScreenExample'; - -export default class CheckingScreen extends Component { - constructor(props) { - super(props); - this.state = { - example: undefined, - }; - } - - render() { - if (this.state.example) { - const CheckingScreen = this.state.example; - const value = this.state.value; - return ; - } - return ( - - {this.props.value} - this.setState({ example: BarcodeScreen })}> - Back button - - - ); - } -} - -const styles = StyleSheet.create({ - container: { - flex: 1, - paddingTop: 60, - alignItems: 'center', - backgroundColor: '#F5FCFF', - }, - valueText: { - marginBottom: 20, - fontSize: 40, - }, - buttonText: { - color: 'blue', - marginBottom: 20, - fontSize: 20, - }, -}); diff --git a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj index 6a28265e0a..c070d40b28 100644 --- a/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj +++ b/ios/ReactNativeCameraKit.xcodeproj/project.pbxproj @@ -8,11 +8,20 @@ /* Begin PBXBuildFile section */ 26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 26550AF51CFC7086007FF2DF /* CKCameraManager.m */; }; - 2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 2685AA231CFD89A300E4A446 /* CKCamera.m */; }; - 269292831D3B7D6000E07DDF /* CKCameraOverlayView.m in Sources */ = {isa = PBXBuildFile; fileRef = 269292821D3B7D6000E07DDF /* CKCameraOverlayView.m */; }; - 269292861D3B81C800E07DDF /* CKOverlayObject.m in Sources */ = {isa = PBXBuildFile; fileRef = 269292851D3B81C800E07DDF /* CKOverlayObject.m */; }; - A7686BFE1EC9CFEC00959216 /* CKCompressedImage.m in Sources */ = {isa = PBXBuildFile; fileRef = A7686BFD1EC9CFEC00959216 /* CKCompressedImage.m */; }; - FC8E10CF253F8A23006D5AD0 /* CKMockPreview.m in Sources */ = {isa = PBXBuildFile; fileRef = FC8E10CE253F8A23006D5AD0 /* CKMockPreview.m */; }; + 460C0C6C2A4B52D800066334 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 460C0C6B2A4B52D800066334 /* PhotoCaptureDelegate.swift */; }; + 4620AA6C2A2C03FC00BC8929 /* RatioOverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6B2A2C03FC00BC8929 /* RatioOverlayView.swift */; }; + 4620AA702A2C4A5F00BC8929 /* SimulatorPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA6F2A2C4A5F00BC8929 /* SimulatorPreviewView.swift */; }; + 4620AA722A2C4FA500BC8929 /* CameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA712A2C4FA500BC8929 /* CameraManager.swift */; }; + 4620AA742A2C52C300BC8929 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4620AA732A2C52C300BC8929 /* CameraView.swift */; }; + 463096882A2C757F002ABA1A /* CKTypes+RCTConvert.m in Sources */ = {isa = PBXBuildFile; fileRef = 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */; }; + 4630968B2A2D5423002ABA1A /* Types.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4630968A2A2D5423002ABA1A /* Types.swift */; }; + 46506F272A37810C0058D3F2 /* RealPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46506F262A37810B0058D3F2 /* RealPreviewView.swift */; }; + 46C558C92A4AAAD100C68BA0 /* RealCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558C82A4AAAD100C68BA0 /* RealCamera.swift */; }; + 46C558CB2A4AAB3400C68BA0 /* CameraProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CA2A4AAB3400C68BA0 /* 
CameraProtocol.swift */; }; + 46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */; }; + 46C558CF2A4AAD7300C68BA0 /* FocusInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */; }; + 46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */; }; + 46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */; }; /* End PBXBuildFile section */ /* Begin PBXCopyFilesBuildPhase section */ @@ -29,18 +38,23 @@ /* Begin PBXFileReference section */ 2646934E1CFB2A6B00F3A740 /* libReactNativeCameraKit.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libReactNativeCameraKit.a; sourceTree = BUILT_PRODUCTS_DIR; }; - 26550AF41CFC7086007FF2DF /* CKCameraManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCameraManager.h; sourceTree = ""; }; 26550AF51CFC7086007FF2DF /* CKCameraManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCameraManager.m; sourceTree = ""; }; - 2685AA221CFD89A300E4A446 /* CKCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCamera.h; sourceTree = ""; }; - 2685AA231CFD89A300E4A446 /* CKCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCamera.m; sourceTree = ""; }; - 269292811D3B7D6000E07DDF /* CKCameraOverlayView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCameraOverlayView.h; sourceTree = ""; }; - 269292821D3B7D6000E07DDF /* CKCameraOverlayView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCameraOverlayView.m; sourceTree = ""; }; - 269292841D3B81C800E07DDF /* CKOverlayObject.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKOverlayObject.h; sourceTree = ""; }; - 269292851D3B81C800E07DDF /* CKOverlayObject.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKOverlayObject.m; sourceTree = ""; }; - A7686BFC1EC9CFEC00959216 /* CKCompressedImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCompressedImage.h; sourceTree = ""; }; - A7686BFD1EC9CFEC00959216 /* CKCompressedImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCompressedImage.m; sourceTree = ""; }; - FC8E10CD253F8A23006D5AD0 /* CKMockPreview.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CKMockPreview.h; sourceTree = ""; }; - FC8E10CE253F8A23006D5AD0 /* CKMockPreview.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CKMockPreview.m; sourceTree = ""; }; + 460C0C6B2A4B52D800066334 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = ""; }; + 4620AA682A2BFDBC00BC8929 /* ReactNativeCameraKit-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "ReactNativeCameraKit-Bridging-Header.h"; sourceTree = ""; }; + 4620AA6B2A2C03FC00BC8929 /* RatioOverlayView.swift */ = 
{isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RatioOverlayView.swift; sourceTree = ""; }; + 4620AA6F2A2C4A5F00BC8929 /* SimulatorPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorPreviewView.swift; sourceTree = ""; }; + 4620AA712A2C4FA500BC8929 /* CameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraManager.swift; sourceTree = ""; }; + 4620AA732A2C52C300BC8929 /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = ""; }; + 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "CKTypes+RCTConvert.m"; sourceTree = ""; }; + 463096892A2C7D89002ABA1A /* ReactNativeCameraKit.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ReactNativeCameraKit.h; sourceTree = ""; }; + 4630968A2A2D5423002ABA1A /* Types.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Types.swift; sourceTree = ""; }; + 46506F262A37810B0058D3F2 /* RealPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RealPreviewView.swift; sourceTree = ""; }; + 46C558C82A4AAAD100C68BA0 /* RealCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RealCamera.swift; sourceTree = ""; }; + 46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraProtocol.swift; sourceTree = ""; }; + 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorCamera.swift; sourceTree = ""; }; + 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FocusInterfaceView.swift; sourceTree = ""; }; + 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerFrameView.swift; sourceTree = ""; }; + 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScannerInterfaceView.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -73,18 +87,23 @@ 264693501CFB2A6B00F3A740 /* ReactNativeCameraKit */ = { isa = PBXGroup; children = ( - A7686BFC1EC9CFEC00959216 /* CKCompressedImage.h */, - A7686BFD1EC9CFEC00959216 /* CKCompressedImage.m */, - 26550AF41CFC7086007FF2DF /* CKCameraManager.h */, 26550AF51CFC7086007FF2DF /* CKCameraManager.m */, - 2685AA221CFD89A300E4A446 /* CKCamera.h */, - 2685AA231CFD89A300E4A446 /* CKCamera.m */, - 269292811D3B7D6000E07DDF /* CKCameraOverlayView.h */, - 269292821D3B7D6000E07DDF /* CKCameraOverlayView.m */, - 269292841D3B81C800E07DDF /* CKOverlayObject.h */, - 269292851D3B81C800E07DDF /* CKOverlayObject.m */, - FC8E10CD253F8A23006D5AD0 /* CKMockPreview.h */, - FC8E10CE253F8A23006D5AD0 /* CKMockPreview.m */, + 4620AA712A2C4FA500BC8929 /* CameraManager.swift */, + 463096872A2C757F002ABA1A /* CKTypes+RCTConvert.m */, + 4620AA732A2C52C300BC8929 /* CameraView.swift */, + 46C558CA2A4AAB3400C68BA0 /* CameraProtocol.swift */, + 46C558C82A4AAAD100C68BA0 /* RealCamera.swift */, + 46506F262A37810B0058D3F2 /* RealPreviewView.swift */, + 460C0C6B2A4B52D800066334 /* PhotoCaptureDelegate.swift */, + 46C558CC2A4AAB5D00C68BA0 /* SimulatorCamera.swift */, + 
4620AA6F2A2C4A5F00BC8929 /* SimulatorPreviewView.swift */, + 4630968A2A2D5423002ABA1A /* Types.swift */, + 4620AA6B2A2C03FC00BC8929 /* RatioOverlayView.swift */, + 46F30C002A3A859B000597F6 /* ScannerFrameView.swift */, + 46F30C022A3ABB9D000597F6 /* ScannerInterfaceView.swift */, + 46C558CE2A4AAD7300C68BA0 /* FocusInterfaceView.swift */, + 4620AA682A2BFDBC00BC8929 /* ReactNativeCameraKit-Bridging-Header.h */, + 463096892A2C7D89002ABA1A /* ReactNativeCameraKit.h */, ); path = ReactNativeCameraKit; sourceTree = ""; @@ -120,7 +139,7 @@ TargetAttributes = { 2646934D1CFB2A6B00F3A740 = { CreatedOnToolsVersion = 7.3; - LastSwiftMigration = 1200; + LastSwiftMigration = 1430; }; }; }; @@ -147,12 +166,21 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 46C558CF2A4AAD7300C68BA0 /* FocusInterfaceView.swift in Sources */, + 463096882A2C757F002ABA1A /* CKTypes+RCTConvert.m in Sources */, + 4620AA742A2C52C300BC8929 /* CameraView.swift in Sources */, + 4620AA702A2C4A5F00BC8929 /* SimulatorPreviewView.swift in Sources */, + 46F30C032A3ABB9D000597F6 /* ScannerInterfaceView.swift in Sources */, + 46C558C92A4AAAD100C68BA0 /* RealCamera.swift in Sources */, + 46F30C012A3A859B000597F6 /* ScannerFrameView.swift in Sources */, + 46C558CD2A4AAB5D00C68BA0 /* SimulatorCamera.swift in Sources */, + 46506F272A37810C0058D3F2 /* RealPreviewView.swift in Sources */, + 4630968B2A2D5423002ABA1A /* Types.swift in Sources */, + 4620AA6C2A2C03FC00BC8929 /* RatioOverlayView.swift in Sources */, + 460C0C6C2A4B52D800066334 /* PhotoCaptureDelegate.swift in Sources */, 26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */, - A7686BFE1EC9CFEC00959216 /* CKCompressedImage.m in Sources */, - 269292861D3B81C800E07DDF /* CKOverlayObject.m in Sources */, - 2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */, - 269292831D3B7D6000E07DDF /* CKCameraOverlayView.m in Sources */, - FC8E10CF253F8A23006D5AD0 /* CKMockPreview.m in Sources */, + 46C558CB2A4AAB3400C68BA0 /* CameraProtocol.swift in Sources */, + 4620AA722A2C4FA500BC8929 /* CameraManager.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -251,6 +279,9 @@ OTHER_LDFLAGS = "-ObjC"; PRODUCT_NAME = "$(TARGET_NAME)"; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; }; name = Debug; }; @@ -264,6 +295,8 @@ OTHER_LDFLAGS = "-ObjC"; PRODUCT_NAME = "$(TARGET_NAME)"; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h"; + SWIFT_VERSION = 5.0; }; name = Release; }; diff --git a/ios/ReactNativeCameraKit/CKCamera.h b/ios/ReactNativeCameraKit/CKCamera.h deleted file mode 100644 index cafa024fa0..0000000000 --- a/ios/ReactNativeCameraKit/CKCamera.h +++ /dev/null @@ -1,83 +0,0 @@ -#import -@import AVFoundation; - -#if __has_include() -#import -#else -#import "RCTConvert.h" -#endif - - -typedef void (^CaptureBlock)(NSDictionary *imageObject); -typedef void (^CallbackBlock)(BOOL success); - -typedef NS_ENUM(NSInteger, CKCameraType) { - CKCameraTypeBack, - CKCameraTypeFront, -}; - -@interface RCTConvert(CKCameraType) - -+ (CKCameraType)CKCameraType:(id)json; - -@end - -typedef NS_ENUM(NSInteger, CKCameraFlashMode) { - CKCameraFlashModeAuto, - CKCameraFlashModeOn, - CKCameraFlashModeOff -}; - -@interface RCTConvert(CKCameraFlashMode) - -+ (CKCameraFlashMode)CKCameraFlashMode:(id)json; - -@end - -typedef NS_ENUM(NSInteger, CKCameraTorchMode) { - CKCameraTorchModeOn, - 
CKCameraTorchModeOff -}; - -@interface RCTConvert(CKCameraTorchMode) - -+ (CKCameraTorchMode)CKCameraTorchMode:(id)json; - -@end - -typedef NS_ENUM(NSInteger, CKCameraFocusMode) { - CKCameraFocusModeOn, - CKCameraFocusModeOff, -}; - -@interface RCTConvert(CKCameraFocusMode) - -+ (CKCameraFocusMode)CKCameraFocusMode:(id)json; - -@end - -typedef NS_ENUM(NSInteger, CKCameraZoomMode) { - CKCameraZoomModeOn, - CKCameraZoomModeOff, -}; - -@interface RCTConvert(CKCameraZoomMode) - -+ (CKCameraZoomMode)CKCameraZoomMode:(id)json; - -@end - - -@interface CKCamera : UIView - -@property (nonatomic, readonly) AVCaptureDeviceInput *videoDeviceInput; - - -// api -- (void)snapStillImage:(NSDictionary*)options success:(CaptureBlock)block onError:(void (^)(NSString*))onError; - -- (void)setTorchMode:(AVCaptureTorchMode)torchMode; - -+ (NSURL*)saveToTmpFolder:(NSData*)data; - -@end diff --git a/ios/ReactNativeCameraKit/CKCamera.m b/ios/ReactNativeCameraKit/CKCamera.m deleted file mode 100644 index 2a2f15ada8..0000000000 --- a/ios/ReactNativeCameraKit/CKCamera.m +++ /dev/null @@ -1,1144 +0,0 @@ -@import Foundation; -@import Photos; - -#if __has_include() -#import -#import -#else -#import "UIView+React.h" -#import "RCTConvert.h" -#endif - -#import "CKCamera.h" -#import "CKCameraOverlayView.h" -#import "CKMockPreview.h" - -AVCaptureVideoOrientation AVCaptureVideoOrientationFromInterfaceOrientation(UIInterfaceOrientation orientation){ - if (orientation == UIInterfaceOrientationPortrait) { - return AVCaptureVideoOrientationPortrait; - } else if (orientation == UIInterfaceOrientationLandscapeLeft){ - return AVCaptureVideoOrientationLandscapeLeft; - } else if (orientation == UIInterfaceOrientationLandscapeRight){ - return AVCaptureVideoOrientationLandscapeRight; - } else if (orientation == UIInterfaceOrientationPortraitUpsideDown){ - return AVCaptureVideoOrientationPortraitUpsideDown; - } else { - @throw @"unknown interface orientation"; - } -} - -static void * CapturingStillImageContext = &CapturingStillImageContext; -static void * SessionRunningContext = &SessionRunningContext; - -typedef NS_ENUM( NSInteger, CKSetupResult ) { - CKSetupResultSuccess, - CKSetupResultCameraNotAuthorized, - CKSetupResultSessionConfigurationFailed -}; - -@implementation RCTConvert(CKCameraType) - -RCT_ENUM_CONVERTER(CKCameraType, (@{ - @"back": @(AVCaptureDevicePositionBack), - @"front": @(AVCaptureDevicePositionFront), - }), AVCaptureDevicePositionBack, integerValue) -@end - -@implementation RCTConvert(CKCameraTorchMode) - -RCT_ENUM_CONVERTER(CKCameraTorchMode, (@{ - @"on": @(AVCaptureTorchModeOn), - @"off": @(AVCaptureTorchModeOff) - }), AVCaptureTorchModeAuto, integerValue) -@end - -@implementation RCTConvert(CKCameraFlashMode) - -RCT_ENUM_CONVERTER(CKCameraFlashMode, (@{ - @"auto": @(AVCaptureFlashModeAuto), - @"on": @(AVCaptureFlashModeOn), - @"off": @(AVCaptureFlashModeOff) - }), AVCaptureFlashModeAuto, integerValue) - -@end - -@implementation RCTConvert(CKCameraFocusMode) - -RCT_ENUM_CONVERTER(CKCameraFocusMode, (@{ - @"on": @(CKCameraFocusModeOn), - @"off": @(CKCameraFocusModeOff) - }), CKCameraFocusModeOn, integerValue) - -@end - -@implementation RCTConvert(CKCameraZoomMode) - -RCT_ENUM_CONVERTER(CKCameraZoomMode, (@{ - @"on": @(CKCameraZoomModeOn), - @"off": @(CKCameraZoomModeOff) - }), CKCameraZoomModeOn, integerValue) - -@end - -@interface CKCamera () - - -@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer; -@property (nonatomic, strong) CKMockPreview *mockPreview; -@property (nonatomic, strong) UIView 
*focusView; -@property (nonatomic, strong) NSTimer *focusViewTimer; -@property (nonatomic, strong) CKCameraOverlayView *cameraOverlayView; - -@property (nonatomic, strong) NSTimer *focusResetTimer; -@property (nonatomic) BOOL startFocusResetTimerAfterFocusing; -@property (nonatomic) NSInteger resetFocusTimeout; -@property (nonatomic) BOOL resetFocusWhenMotionDetected; -@property (nonatomic) BOOL tapToFocusEngaged; - -// session management -@property (nonatomic) dispatch_queue_t sessionQueue; -@property (nonatomic) AVCaptureSession *session; -@property (nonatomic, readwrite) AVCaptureDeviceInput *videoDeviceInput; -@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput; -@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput; -@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput; -@property (nonatomic, strong) NSString *codeStringValue; - - -// utilities -@property (nonatomic) CKSetupResult setupResult; -@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning; -@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID; - -// scanner options -@property (nonatomic) BOOL showFrame; -@property (nonatomic) UIView *scannerView; -@property (nonatomic, strong) RCTDirectEventBlock onReadCode; -@property (nonatomic) CGFloat frameOffset; -@property (nonatomic) CGFloat frameHeight; -@property (nonatomic, strong) UIColor *laserColor; -@property (nonatomic, strong) UIColor *frameColor; -@property (nonatomic) UIView * dataReadingFrame; - -// camera options -@property (nonatomic) AVCaptureDevicePosition cameraType; -@property (nonatomic) AVCaptureFlashMode flashMode; -@property (nonatomic) AVCaptureTorchMode torchMode; -@property (nonatomic) CKCameraFocusMode focusMode; -@property (nonatomic) CKCameraZoomMode zoomMode; -@property (nonatomic, strong) NSString* ratioOverlay; -@property (nonatomic, strong) UIColor *ratioOverlayColor; -@property (nonatomic, strong) RCTDirectEventBlock onOrientationChange; - -@property (nonatomic) BOOL isAddedOberver; - -@end - -@implementation CKCamera - -#pragma mark - initializtion - -- (void)dealloc -{ - [self removeObservers]; - [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; -} - --(PHFetchOptions *)fetchOptions { - - PHFetchOptions *fetchOptions = [PHFetchOptions new]; - fetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]]; - fetchOptions.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d && creationDate <= %@",PHAssetMediaTypeImage, [NSDate date]]; - // iOS 9+ - if ([fetchOptions respondsToSelector:@selector(fetchLimit)]) { - fetchOptions.fetchLimit = 1; - } - - return fetchOptions; -} - -- (void)removeReactSubview:(UIView *)subview -{ - [subview removeFromSuperview]; - [super removeReactSubview:subview]; -} - -- (void)removeFromSuperview -{ - [[NSNotificationCenter defaultCenter] removeObserver:self name:UIDeviceOrientationDidChangeNotification object:[UIDevice currentDevice]]; - dispatch_async( self.sessionQueue, ^{ - if ( self.setupResult == CKSetupResultSuccess ) { - [self.session stopRunning]; - [self removeObservers]; - } - } ); - [super removeFromSuperview]; - -} - -- (instancetype)initWithFrame:(CGRect)frame { - self = [super initWithFrame:frame]; - - if (self){ - // Create the AVCaptureSession. 
- self.session = [[AVCaptureSession alloc] init]; - - // Listen to orientation changes - [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; - [[NSNotificationCenter defaultCenter] - addObserver:self selector:@selector(orientationChanged:) - name:UIDeviceOrientationDidChangeNotification - object:[UIDevice currentDevice]]; - - // Fit camera preview inside of viewport - self.session.sessionPreset = AVCaptureSessionPresetPhoto; - - // Communicate with the session and other session objects on this queue. - self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL ); - - [self handleCameraPermission]; - -#if (TARGET_IPHONE_SIMULATOR) - // Create mock camera layer. When a photo is taken, we capture this layer and save it in place of a - // hardware input. - self.mockPreview = [[CKMockPreview alloc] initWithFrame:CGRectZero]; - [self addSubview:self.mockPreview]; -#else - self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session]; - [self.layer addSublayer:self.previewLayer]; - self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; - [self setupCaptureSession]; -#endif - - UIView *focusView = [[UIView alloc] initWithFrame:CGRectZero]; - focusView.backgroundColor = [UIColor clearColor]; - focusView.layer.borderColor = [UIColor yellowColor].CGColor; - focusView.layer.borderWidth = 1; - focusView.hidden = YES; - self.focusView = focusView; - - [self addSubview:self.focusView]; - - // defaults - self.zoomMode = CKCameraZoomModeOn; - self.flashMode = CKCameraFlashModeAuto; - self.focusMode = CKCameraFocusModeOn; - - self.frameColor = [UIColor whiteColor]; - self.laserColor = [UIColor redColor]; - self.frameOffset = 30; - self.frameHeight = 200; - } - - return self; -} - -- (void)setCameraType:(AVCaptureDevicePosition)cameraType { - if (cameraType != _cameraType) { - _cameraType = cameraType; - [self changeCamera:cameraType]; - } -} - -- (void)setFlashMode:(AVCaptureFlashMode)flashMode { - if (flashMode != _flashMode) { - _flashMode = flashMode; - [CKCamera setFlashMode:flashMode forDevice:self.videoDeviceInput.device]; - } -} - --(void)setTorchMode:(AVCaptureTorchMode)torchMode { - _torchMode = torchMode; - if (self.videoDeviceInput && [self.videoDeviceInput.device isTorchModeSupported:torchMode] && self.videoDeviceInput.device.hasTorch) { - NSError* err = nil; - if ( [self.videoDeviceInput.device lockForConfiguration:&err] ) { - [self.videoDeviceInput.device setTorchMode:torchMode]; - [self.videoDeviceInput.device unlockForConfiguration]; - } - } -} - - -+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device -{ - if (device.hasFlash && [device isFlashModeSupported:flashMode] ) { - NSError *error = nil; - if ([device lockForConfiguration:&error] ) { - device.flashMode = flashMode; - [device unlockForConfiguration]; - } else { - NSLog(@"Could not lock device for configuration: %@", error); - } - } -} - -- (void)setFocusMode:(CKCameraFocusMode)focusMode { - _focusMode = focusMode; - if (self.focusMode == CKCameraFocusModeOn) { - UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAndExposeTap:)]; - [self addGestureRecognizer:tapGesture]; - } else { - NSArray *gestures = [self gestureRecognizers]; - for (id object in gestures) { - if ([object class] == UITapGestureRecognizer.class) { - [self removeGestureRecognizer:object]; - } - } - } -} - -- (void)setZoomMode:(CKCameraZoomMode)zoomMode { - _zoomMode = zoomMode; - if (zoomMode 
== CKCameraZoomModeOn) { - UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(handlePinchToZoomRecognizer:)]; - [self addGestureRecognizer:pinchGesture]; - } else { - NSArray *gestures = [self gestureRecognizers]; - for (id object in gestures) { - if ([object class] == UIPinchGestureRecognizer.class) { - [self removeGestureRecognizer:object]; - } - } - } -} - -- (void)setRatio:(NSString*)ratio { - if (ratio && ![ratio isEqualToString:@""]) { - self.ratioOverlay = ratio; - } -} - -- (void)setLaserColor:(UIColor *)color { - if (color != nil) { - _laserColor = color; - } -} - -- (void)setFrameColor:(UIColor *)color { - if (color != nil) { - _frameColor = color; - } -} - -- (void) orientationChanged:(NSNotification *)notification -{ - if (!self.onOrientationChange) { - return; - } - - // PORTRAIT: 0, // ⬆️ - // LANDSCAPE_LEFT: 1, // ⬅️ - // PORTRAIT_UPSIDE_DOWN: 2, // ⬇️ - // LANDSCAPE_RIGHT: 3, // ➡️ - - UIDevice * device = notification.object; - UIDeviceOrientation orientation = device.orientation; - if (orientation == UIDeviceOrientationPortrait) { - self.onOrientationChange(@{@"orientation": @0}); - } else if (orientation == UIDeviceOrientationLandscapeLeft) { - self.onOrientationChange(@{@"orientation": @1}); - } else if (orientation == UIDeviceOrientationPortraitUpsideDown) { - self.onOrientationChange(@{@"orientation": @2}); - } else if (orientation == UIDeviceOrientationLandscapeRight) { - self.onOrientationChange(@{@"orientation": @3}); - } -} - -- (void) setupCaptureSession { - // Setup the capture session. - // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. - // Why not do all of this on the main queue? - // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue - // so that the main queue isn't blocked, which keeps the UI responsive. 
- dispatch_async( self.sessionQueue, ^{ - if ( self.setupResult != CKSetupResultSuccess ) { - return; - } - - self.backgroundRecordingID = UIBackgroundTaskInvalid; - NSError *error = nil; - - AVCaptureDevice *videoDevice = [CKCamera deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack]; - AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; - - [self.session beginConfiguration]; - - if ( [self.session canAddInput:videoDeviceInput] ) { - [self.session addInput:videoDeviceInput]; - self.videoDeviceInput = videoDeviceInput; - [CKCamera setFlashMode:self.flashMode forDevice:self.videoDeviceInput.device]; - } - else { - self.setupResult = CKSetupResultSessionConfigurationFailed; - } - - AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; - if ( [self.session canAddOutput:movieFileOutput] ) { - [self.session addOutput:movieFileOutput]; - AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo]; - if ( connection.isVideoStabilizationSupported ) { - connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; - } - self.movieFileOutput = movieFileOutput; - } - else { - self.setupResult = CKSetupResultSessionConfigurationFailed; - } - - AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; - if ( [self.session canAddOutput:stillImageOutput] ) { - stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG}; - [self.session addOutput:stillImageOutput]; - self.stillImageOutput = stillImageOutput; - } - else { - self.setupResult = CKSetupResultSessionConfigurationFailed; - } - - AVCaptureMetadataOutput * output = [[AVCaptureMetadataOutput alloc] init]; - if ([self.session canAddOutput:output]) { - self.metadataOutput = output; - [self.session addOutput:self.metadataOutput]; - [self.metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()]; - [self.metadataOutput setMetadataObjectTypes:[self.metadataOutput availableMetadataObjectTypes]]; - } - - [self.session commitConfiguration]; - dispatch_async(dispatch_get_main_queue(), ^{ - [self setInitialPreviewLayerVideoOrientation]; - }); - } ); -} - -- (void)setInitialPreviewLayerVideoOrientation{ - UIInterfaceOrientation initialInterfaceOrientation = [UIApplication sharedApplication].statusBarOrientation; - self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationFromInterfaceOrientation(initialInterfaceOrientation); -} - --(void)handleCameraPermission { - - switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] ) - { - case AVAuthorizationStatusAuthorized: - { - // The user has previously granted access to the camera. - break; - } - case AVAuthorizationStatusNotDetermined: - { - // The user has not yet been presented with the option to grant video access. - // We suspend the session queue to delay session setup until the access request has completed to avoid - // asking the user for audio access if video access is denied. - // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup. - dispatch_suspend( self.sessionQueue ); - [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) { - if ( ! 
granted ) { - self.setupResult = CKSetupResultCameraNotAuthorized; - } - dispatch_resume( self.sessionQueue ); - }]; - break; - } - default: - { - // The user has previously denied access. - self.setupResult = CKSetupResultCameraNotAuthorized; - break; - } - } -} - --(void)reactSetFrame:(CGRect)frame { - [super reactSetFrame:frame]; - - self.previewLayer.frame = self.bounds; - -#if TARGET_IPHONE_SIMULATOR - self.mockPreview.frame = self.bounds; - return; -#endif - - [self setOverlayRatioView]; - - dispatch_async( self.sessionQueue, ^{ - switch ( self.setupResult ) - { - case CKSetupResultSuccess: - { - // Only setup observers and start the session running if setup succeeded. - [self addObservers]; - [self.session startRunning]; - self.sessionRunning = self.session.isRunning; - if (self.showFrame) { - dispatch_async(dispatch_get_main_queue(), ^{ - [self addFrameForScanner]; - }); - } - break; - } - case CKSetupResultCameraNotAuthorized: - { - // dispatch_async( dispatch_get_main_queue(), ^{ - // NSString *message = NSLocalizedString( @"AVCam doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" ); - // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert]; - // UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; - // [alertController addAction:cancelAction]; - // // Provide quick access to Settings. - // UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) { - // [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]]; - // }]; - // [alertController addAction:settingsAction]; - // [self presentViewController:alertController animated:YES completion:nil]; - // } ); - break; - } - case CKSetupResultSessionConfigurationFailed: - { - // dispatch_async( dispatch_get_main_queue(), ^{ - // NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" ); - // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert]; - // UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; - // [alertController addAction:cancelAction]; - // [self presentViewController:alertController animated:YES completion:nil]; - // } ); - break; - } - } - } ); -} - --(void)setRatioOverlay:(NSString *)ratioOverlay { - _ratioOverlay = ratioOverlay; - [self.cameraOverlayView setRatio:self.ratioOverlay]; -} - --(void)setOverlayRatioView { - if (self.ratioOverlay) { - [self.cameraOverlayView removeFromSuperview]; - self.cameraOverlayView = [[CKCameraOverlayView alloc] initWithFrame:self.bounds ratioString:self.ratioOverlay overlayColor:self.ratioOverlayColor]; - [self addSubview:self.cameraOverlayView]; - } -} - - -#pragma mark - - - -+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position { - NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType]; - AVCaptureDevice *captureDevice = devices.firstObject; - - for (AVCaptureDevice 
*device in devices) { - if (device.position == position) { - captureDevice = device; - break; - } - } - - return captureDevice; -} - - -#pragma mark - actions - - - -- (void)snapStillImage:(NSDictionary*)options success:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError { - - #if TARGET_IPHONE_SIMULATOR - [self capturePreviewLayer:options success:onSuccess onError:onError]; - return; - #endif - - dispatch_async( self.sessionQueue, ^{ - AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo]; - - UIImageOrientation imageOrientation = UIImageOrientationUp; - switch([UIDevice currentDevice].orientation) { - default: - case UIDeviceOrientationPortrait: - connection.videoOrientation = AVCaptureVideoOrientationPortrait; - imageOrientation = UIImageOrientationUp; - break; - case UIDeviceOrientationPortraitUpsideDown: - connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; - imageOrientation = UIImageOrientationDown; - break; - case UIDeviceOrientationLandscapeLeft: - imageOrientation = UIImageOrientationRight; - connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; - break; - case UIDeviceOrientationLandscapeRight: - connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft; - imageOrientation = UIImageOrientationRightMirrored; - break; - } - - // Capture a still image. - [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^( CMSampleBufferRef imageDataSampleBuffer, NSError *error ) { - if (!imageDataSampleBuffer) { - NSLog(@"Could not capture still image: %@", error); - onError(@"Could not capture still image"); - return; - } - - // The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously. 
- NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; - - [self writeCapturedImageData:imageData onSuccess:onSuccess onError:onError]; - [self resetFocus]; - }]; - }); -} - -- (void)capturePreviewLayer:(NSDictionary*)options success:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError -{ - dispatch_async(dispatch_get_main_queue(), ^{ - if (self.mockPreview != nil) { - UIImage *previewSnapshot = [self.mockPreview snapshotWithTimestamp:YES]; // Generate snapshot from main UI thread - dispatch_async( self.sessionQueue, ^{ // write image async - [self writeCapturedImageData:UIImageJPEGRepresentation(previewSnapshot, 0.85) onSuccess:onSuccess onError:onError]; - }); - } else { - onError(@"Simulator image could not be captured from preview layer"); - } - }); -} - -- (void)writeCapturedImageData:(NSData *)imageData onSuccess:(CaptureBlock)onSuccess onError:(void (^)(NSString*))onError { - NSMutableDictionary *imageInfoDict = [[NSMutableDictionary alloc] init]; - - NSNumber *length = [NSNumber numberWithInteger:imageData.length]; - if (length) { - imageInfoDict[@"size"] = length; - } - - NSURL *temporaryFileURL = [CKCamera saveToTmpFolder:imageData]; - if (temporaryFileURL) { - imageInfoDict[@"uri"] = temporaryFileURL.description; - imageInfoDict[@"name"] = temporaryFileURL.lastPathComponent; - } - - onSuccess(imageInfoDict); -} - -- (void)changeCamera:(AVCaptureDevicePosition)preferredPosition -{ - // Avoid chaning device inputs when camera input is denied by the user, since both front and rear vido input devices will be nil - if ( self.setupResult != CKSetupResultSuccess ) { - return; - } -#if TARGET_IPHONE_SIMULATOR - dispatch_async( dispatch_get_main_queue(), ^{ - [self.mockPreview randomize]; - }); - return; -#endif - - dispatch_async( self.sessionQueue, ^{ - AVCaptureDevice *currentVideoDevice = self.videoDeviceInput.device; - AVCaptureDevice *videoDevice = [CKCamera deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition]; - AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil]; - - [self removeObservers]; - [self.session beginConfiguration]; - - // Remove the existing device input first, since using the front and back camera simultaneously is not supported. 
- [self.session removeInput:self.videoDeviceInput]; - - if ( [self.session canAddInput:videoDeviceInput] ) { - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice]; - - [CKCamera setFlashMode:self.flashMode forDevice:videoDevice]; - - [self.session addInput:videoDeviceInput]; - self.videoDeviceInput = videoDeviceInput; - } - else { - [self.session addInput:self.videoDeviceInput]; - } - - AVCaptureConnection *connection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo]; - if ( connection.isVideoStabilizationSupported ) { - connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; - } - - [self.session commitConfiguration]; - [self addObservers]; - } ); -} - -+(NSURL*)saveToTmpFolder:(NSData*)data { - NSString *temporaryFileName = [NSProcessInfo processInfo].globallyUniqueString; - NSString *temporaryFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[temporaryFileName stringByAppendingPathExtension:@"jpg"]]; - NSURL *temporaryFileURL = [NSURL fileURLWithPath:temporaryFilePath]; - - NSError *error = nil; - [data writeToURL:temporaryFileURL options:NSDataWritingAtomic error:&error]; - - if (error) { - NSLog(@"Error occured while writing image data to a temporary file: %@", error); - } - return temporaryFileURL; -} - - --(void) handlePinchToZoomRecognizer:(UIPinchGestureRecognizer*)pinchRecognizer { - if (pinchRecognizer.state == UIGestureRecognizerStateChanged) { - [self zoom:pinchRecognizer.velocity]; - } -} - - -- (void)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer -{ - CGPoint touchPoint = [gestureRecognizer locationInView:self]; - CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)self.previewLayer captureDevicePointOfInterestForPoint:touchPoint]; - - // Engage manual focus - [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES]; - - // Disengage manual focus once focusing finishing (if focusTimeout > 0) - // See [self observeValueForKeyPath] - self.startFocusResetTimerAfterFocusing = YES; - - self.tapToFocusEngaged = YES; - - // Animate focus rectangle - CGFloat halfDiagonal = 73; - CGFloat halfDiagonalAnimation = halfDiagonal*2; - - CGRect focusViewFrame = CGRectMake(touchPoint.x - (halfDiagonal/2), - touchPoint.y - (halfDiagonal/2), - halfDiagonal, - halfDiagonal); - - self.focusView.alpha = 0; - self.focusView.hidden = NO; - self.focusView.frame = CGRectMake(touchPoint.x - (halfDiagonalAnimation/2), - touchPoint.y - (halfDiagonalAnimation/2), - halfDiagonalAnimation, - halfDiagonalAnimation); - - [UIView animateWithDuration:0.2 animations:^{ - self.focusView.frame = focusViewFrame; - self.focusView.alpha = 1; - } completion:^(BOOL finished) { - self.focusView.alpha = 1; - self.focusView.frame = focusViewFrame; - }]; -} - -- (void)resetFocus -{ - if (self.focusResetTimer) { - [self.focusResetTimer invalidate]; - self.focusResetTimer = nil; - } - - // Resetting focus to continuous focus, so not interested in resetting anymore - self.startFocusResetTimerAfterFocusing = NO; - - // Avoid showing reset-focus animation after each photo capture - if (!self.tapToFocusEngaged) { - return; - } - - self.tapToFocusEngaged = NO; - - // 1. 
Reset actual camera focus - CGPoint deviceCenter = CGPointMake(0.5, 0.5); - [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:deviceCenter monitorSubjectAreaChange:NO]; - - // 2. Create animation to indicate the new focus location - CGPoint layerCenter = [(AVCaptureVideoPreviewLayer *)self.previewLayer pointForCaptureDevicePointOfInterest:deviceCenter]; - - CGFloat halfDiagonal = 123; - CGFloat halfDiagonalAnimation = halfDiagonal*2; - - CGRect focusViewFrame = CGRectMake(layerCenter.x - (halfDiagonal/2), layerCenter.y - (halfDiagonal/2), halfDiagonal, halfDiagonal); - CGRect focusViewFrameForAnimation = CGRectMake(layerCenter.x - (halfDiagonalAnimation/2), layerCenter.y - (halfDiagonalAnimation/2), halfDiagonalAnimation, halfDiagonalAnimation); - - self.focusView.alpha = 0; - self.focusView.hidden = NO; - self.focusView.frame = focusViewFrameForAnimation; - - [UIView animateWithDuration:0.2 animations:^{ - self.focusView.frame = focusViewFrame; - self.focusView.alpha = 1; - } completion:^(BOOL finished) { - self.focusView.alpha = 1; - self.focusView.frame = focusViewFrame; - - if (self.focusViewTimer) { - [self.focusViewTimer invalidate]; - } - self.focusViewTimer = [NSTimer scheduledTimerWithTimeInterval:2 repeats:NO block:^(NSTimer *timer) { - [UIView animateWithDuration:0.2 animations:^{ - self.focusView.alpha = 0; - } completion:^(BOOL finished) { - self.focusView.frame = CGRectZero; - self.focusView.hidden = YES; - }]; - }]; - }]; -} - -- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange -{ - dispatch_async( self.sessionQueue, ^{ - AVCaptureDevice *device = self.videoDeviceInput.device; - NSError *error = nil; - if (![device lockForConfiguration:&error]) { - NSLog(@"Unable to device.lockForConfiguration() %@", error); - return; - } - - // Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation. - // Call -set(Focus/Exposure)Mode: to apply the new point of interest. 
- if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) { - device.focusPointOfInterest = point; - device.focusMode = focusMode; - } - - if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) { - device.exposurePointOfInterest = point; - device.exposureMode = exposureMode; - } - - device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange && self.resetFocusWhenMotionDetected; - [device unlockForConfiguration]; - }); -} - -- (void)zoom:(CGFloat)velocity { - if (isnan(velocity)) { - return; - } - const CGFloat pinchVelocityDividerFactor = 20.0f; // TODO: calibrate or make this component's property - NSError *error = nil; - AVCaptureDevice *device = [[self videoDeviceInput] device]; - if ([device lockForConfiguration:&error]) { - CGFloat zoomFactor = device.videoZoomFactor + atan(velocity / pinchVelocityDividerFactor); - if (zoomFactor > device.activeFormat.videoMaxZoomFactor) { - zoomFactor = device.activeFormat.videoMaxZoomFactor; - } else if (zoomFactor < 1) { - zoomFactor = 1.0f; - } - device.videoZoomFactor = zoomFactor; - [device unlockForConfiguration]; - } else { - //NSLog(@"error: %@", error); - } -} - - -#pragma mark - Frame for Scanner Settings - -- (void)didMoveToWindow { - [super didMoveToWindow]; - if (self.sessionRunning && self.dataReadingFrame) { - dispatch_async(dispatch_get_main_queue(), ^{ - [self startAnimatingScanner:self.dataReadingFrame]; - }); - } -} - -- (void)addFrameForScanner { - CGFloat frameWidth = self.bounds.size.width - 2 * self.frameOffset; - if (!self.dataReadingFrame) { - self.dataReadingFrame = [[UIView alloc] initWithFrame:CGRectMake(0, 0, frameWidth, self.frameHeight)]; // - self.dataReadingFrame.center = self.center; - self.dataReadingFrame.backgroundColor = [UIColor clearColor]; - [self createCustomFramesForView:self.dataReadingFrame]; - [self addSubview:self.dataReadingFrame]; - - [self startAnimatingScanner:self.dataReadingFrame]; - - [self addVisualEffects:self.dataReadingFrame.frame]; - - CGRect visibleRect = [self.previewLayer metadataOutputRectOfInterestForRect:self.dataReadingFrame.frame]; - self.metadataOutput.rectOfInterest = visibleRect; - } -} - -- (void)createCustomFramesForView:(UIView *)frameView { - CGFloat cornerSize = 20.f; - CGFloat cornerWidth = 2.f; - for (int i = 0; i < 8; i++) { - CGFloat x = 0.0; - CGFloat y = 0.0; - CGFloat width = 0.0; - CGFloat height = 0.0; - switch (i) { - case 0: - x = 0; y = 0; width = cornerWidth; height = cornerSize; - break; - case 1: - x = 0; y = 0; width = cornerSize; height = cornerWidth; - break; - case 2: - x = CGRectGetWidth(frameView.bounds) - cornerSize; y = 0; width = cornerSize; height = cornerWidth; - break; - case 3: - x = CGRectGetWidth(frameView.bounds) - cornerWidth; y = 0; width = cornerWidth; height = cornerSize; - break; - case 4: - x = CGRectGetWidth(frameView.bounds) - cornerWidth; - y = CGRectGetHeight(frameView.bounds) - cornerSize; width = cornerWidth; height = cornerSize; - break; - case 5: - x = CGRectGetWidth(frameView.bounds) - cornerSize; - y = CGRectGetHeight(frameView.bounds) - cornerWidth; width = cornerSize; height = cornerWidth; - break; - case 6: - x = 0; y = CGRectGetHeight(frameView.bounds) - cornerWidth; width = cornerSize; height = cornerWidth; - break; - case 7: - x = 0; y = CGRectGetHeight(frameView.bounds) - cornerSize; width = cornerWidth; height = cornerSize; - break; - } - UIView * cornerView = [[UIView alloc] initWithFrame:CGRectMake(x, y, width, height)]; - 
cornerView.backgroundColor = self.frameColor; - [frameView addSubview:cornerView]; - } -} - -- (void)addVisualEffects:(CGRect)inputRect { - UIView *topView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, self.frame.size.width, inputRect.origin.y)]; - topView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:topView]; - - UIView *leftSideView = [[UIView alloc] initWithFrame:CGRectMake(0, inputRect.origin.y, self.frameOffset, self.frameHeight)]; //paddingForScanner scannerHeight - leftSideView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:leftSideView]; - - UIView *rightSideView = [[UIView alloc] initWithFrame:CGRectMake(inputRect.size.width + self.frameOffset, inputRect.origin.y, self.frameOffset, self.frameHeight)]; - rightSideView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:rightSideView]; - - UIView *bottomView = [[UIView alloc] initWithFrame:CGRectMake(0, inputRect.origin.y + self.frameHeight, self.frame.size.width, - self.frame.size.height - inputRect.origin.y - self.frameHeight)]; - bottomView.backgroundColor = [UIColor colorWithRed:0.0/255.0 green:0.0/255.0 blue:0.0/255.0 alpha:0.4]; - [self addSubview:bottomView]; -} - -- (void)startAnimatingScanner:(UIView *)inputView { - if (!self.scannerView) { - self.scannerView = [[UIView alloc] initWithFrame:CGRectMake(2, 0, inputView.frame.size.width - 4, 2)]; - self.scannerView.backgroundColor = self.laserColor; - } - if (self.scannerView.frame.origin.y != 0) { - [self.scannerView setFrame:CGRectMake(2, 0, inputView.frame.size.width - 4, 2)]; - } - [inputView addSubview:self.scannerView]; - [UIView animateWithDuration:3 delay:0 options:(UIViewAnimationOptionAutoreverse | UIViewAnimationOptionRepeat) animations:^{ - CGFloat middleX = inputView.frame.size.width / 2; - self.scannerView.center = CGPointMake(middleX, inputView.frame.size.height - 1); - } completion:^(BOOL finished) {}]; -} - -- (void)stopAnimatingScanner { - [self.scannerView removeFromSuperview]; -} - -//Observer actions - -- (void)didEnterBackground:(NSNotification *)notification { - [self stopAnimatingScanner]; -} - -- (void)willEnterForeground:(NSNotification *)notification { - [self startAnimatingScanner:self.dataReadingFrame]; -} - -#pragma mark - observers - -- (void)didChangeStatusBarOrientation:(NSNotification *)notification { - UIInterfaceOrientation currentInterfaceOrientation = [UIApplication sharedApplication].statusBarOrientation; - self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationFromInterfaceOrientation(currentInterfaceOrientation); -} - -- (void)addObservers -{ - - if (!self.isAddedOberver) { - [self.session addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext]; - [self.stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:CapturingStillImageContext]; - - [self.videoDeviceInput.device addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.videoDeviceInput.device]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session]; - // A 
session can only run when the app is full screen. It will be interrupted in a multi-app layout, introduced in iOS 9, - // see also the documentation of AVCaptureSessionInterruptionReason. Add observers to handle these session interruptions - // and show a preview is paused message. See the documentation of AVCaptureSessionWasInterruptedNotification for other - // interruption reasons. - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:self.session]; - //Observers for re-usage animation when app go to the background and back - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification - object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(willEnterForeground:) - name:UIApplicationWillEnterForegroundNotification - object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(didChangeStatusBarOrientation:) - name:UIApplicationDidChangeStatusBarOrientationNotification - object:nil]; - self.isAddedOberver = YES; - } -} - -//UIApplicationDidEnterBackgroundNotification NS_AVAILABLE_IOS(4_0); -//UIKIT_EXTERN NSNotificationName const UIApplicationWillEnterForegroundNotification - -- (void)sessionWasInterrupted:(NSNotification *)notification -{ - // In some scenarios we want to enable the user to resume the session running. - // For example, if music playback is initiated via control center while using AVCam, - // then the user can let AVCam resume the session running, which will stop music playback. - // Note that stopping music playback in control center will not automatically resume the session running. - // Also note that it is not always possible to resume, see -[resumeInterruptedSession:]. - BOOL showResumeButton = NO; - - // In iOS 9 and later, the userInfo dictionary contains information on why the session was interrupted. - if ( &AVCaptureSessionInterruptionReasonKey ) { - AVCaptureSessionInterruptionReason reason = [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]; - //NSLog( @"Capture session was interrupted with reason %ld", (long)reason ); - - if ( reason == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient || - reason == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient ) { - showResumeButton = YES; - } - } -} - - -- (void)removeObservers -{ - if (self.isAddedOberver) { - [[NSNotificationCenter defaultCenter] removeObserver:self]; - [self.session removeObserver:self forKeyPath:@"running" context:SessionRunningContext]; - [self.stillImageOutput removeObserver:self forKeyPath:@"capturingStillImage" context:CapturingStillImageContext]; - [self.videoDeviceInput.device removeObserver:self forKeyPath:@"adjustingFocus"]; - self.isAddedOberver = NO; - } -} - -- (void)sessionRuntimeError:(NSNotification *)notification -{ - NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; - //NSLog( @"Capture session runtime error: %@", error ); - - // Automatically try to restart the session running if media services were reset and the last start running succeeded. - // Otherwise, enable the user to try to resume the session running. 
- if ( error.code == AVErrorMediaServicesWereReset ) { - dispatch_async( self.sessionQueue, ^{ - if ( self.isSessionRunning ) { - [self.session startRunning]; - self.sessionRunning = self.session.isRunning; - } - else { - } - } ); - } -} - - -- (void)subjectAreaDidChange:(NSNotification *)notification -{ - [self resetFocus]; -} - -- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context -{ - if (context == CapturingStillImageContext) - { - // Flash/dim preview to indicate shutter action - BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue]; - if ( isCapturingStillImage ) - { - dispatch_async(dispatch_get_main_queue(), ^{ - self.alpha = 0.0; - [UIView animateWithDuration:0.35 animations:^{ - self.alpha = 1.0; - }]; - }); - } - } - else if ([keyPath isEqualToString:@"adjustingFocus"]) - { - // Note: oldKey is not available (value is always NO it seems) so we only check on newKey - BOOL isFocusing = [change[NSKeyValueChangeNewKey] boolValue]; - if (self.startFocusResetTimerAfterFocusing == YES && !isFocusing && self.resetFocusTimeout > 0) - { - self.startFocusResetTimerAfterFocusing = NO; - - // Disengage manual focus after focusTimeout milliseconds - NSTimeInterval focusTimeoutSeconds = self.resetFocusTimeout / 1000; - self.focusResetTimer = [NSTimer scheduledTimerWithTimeInterval:focusTimeoutSeconds repeats:NO block:^(NSTimer *timer) { - [self resetFocus]; - }]; - } - } - else if (context == SessionRunningContext) - { -// BOOL isSessionRunning = [change[NSKeyValueChangeNewKey] boolValue]; -// -// dispatch_async( dispatch_get_main_queue(), ^{ -// // Only enable the ability to change camera if the device has more than one camera. -// self.cameraButton.enabled = isSessionRunning && ( [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1 ); -// self.recordButton.enabled = isSessionRunning; -// self.stillButton.enabled = isSessionRunning; -// } ); - } - else - { - [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; - } -} - -#pragma mark - AVCaptureMetadataOutputObjectsDelegate - -- (void)captureOutput:(AVCaptureOutput *)output -didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects - fromConnection:(AVCaptureConnection *)connection { - - for(AVMetadataObject *metadataObject in metadataObjects) - { - if ([metadataObject isKindOfClass:[AVMetadataMachineReadableCodeObject class]] && [self isSupportedBarCodeType:metadataObject.type]) { - - AVMetadataMachineReadableCodeObject *code = (AVMetadataMachineReadableCodeObject*)[self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject]; - if (self.onReadCode && code.stringValue && ![code.stringValue isEqualToString:self.codeStringValue]) { - self.onReadCode(@{@"codeStringValue": code.stringValue}); - [self stopAnimatingScanner]; - } - } - } -} - -- (BOOL)isSupportedBarCodeType:(NSString *)currentType { - BOOL result = NO; - NSArray *supportedBarcodeTypes = @[AVMetadataObjectTypeUPCECode,AVMetadataObjectTypeCode39Code,AVMetadataObjectTypeCode39Mod43Code, - AVMetadataObjectTypeEAN13Code,AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, - AVMetadataObjectTypeCode128Code, AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, - AVMetadataObjectTypeAztecCode, AVMetadataObjectTypeDataMatrixCode, AVMetadataObjectTypeInterleaved2of5Code]; - for (NSString* object in supportedBarcodeTypes) { - if ([currentType isEqualToString:object]) { - result = YES; - } - } - return 
result; -} - -#pragma mark - String Constants For Scanner - -const NSString *offsetForScannerFrame = @"offsetFrame"; -const NSString *heightForScannerFrame = @"frameHeight"; -const NSString *colorForFrame = @"colorForFrame"; -const NSString *isNeedMultipleScanBarcode = @"isNeedMultipleScanBarcode"; - - -@end - diff --git a/ios/ReactNativeCameraKit/CKCameraManager.h b/ios/ReactNativeCameraKit/CKCameraManager.h deleted file mode 100644 index 80362aa8c3..0000000000 --- a/ios/ReactNativeCameraKit/CKCameraManager.h +++ /dev/null @@ -1,15 +0,0 @@ -@import AVFoundation; - -#if __has_include() -#import -#import -#else -#import "RCTViewManager.h" -#import "RCTConvert.h" -#endif - - - -@interface CKCameraManager : RCTViewManager - -@end diff --git a/ios/ReactNativeCameraKit/CKCameraManager.m b/ios/ReactNativeCameraKit/CKCameraManager.m index 885f93e8bc..0de8e2bdc3 100644 --- a/ios/ReactNativeCameraKit/CKCameraManager.m +++ b/ios/ReactNativeCameraKit/CKCameraManager.m @@ -1,83 +1,47 @@ -#import "CKCameraManager.h" -#import "CKCamera.h" +// +// CKCameraManager.m +// ReactNativeCameraKit +// +@import AVFoundation; -@interface CKCameraManager () +#if __has_include() +#import +#import +#else +#import "RCTViewManager.h" +#import "RCTConvert.h" +#endif -@property (nonatomic, strong) CKCamera *camera; - -@end - -@implementation CKCameraManager - -RCT_EXPORT_MODULE() - -- (UIView *)view { - self.camera = [CKCamera new]; - return self.camera; -} +@interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(cameraType, CKCameraType) -RCT_EXPORT_VIEW_PROPERTY(flashMode, CKCameraFlashMode) -RCT_EXPORT_VIEW_PROPERTY(torchMode, CKCameraTorchMode) -RCT_EXPORT_VIEW_PROPERTY(focusMode, CKCameraFocusMode) -RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKCameraZoomMode) +RCT_EXPORT_VIEW_PROPERTY(flashMode, CKFlashMode) +RCT_EXPORT_VIEW_PROPERTY(torchMode, CKTorchMode) RCT_EXPORT_VIEW_PROPERTY(ratioOverlay, NSString) RCT_EXPORT_VIEW_PROPERTY(ratioOverlayColor, UIColor) +RCT_EXPORT_VIEW_PROPERTY(scanBarcode, BOOL) RCT_EXPORT_VIEW_PROPERTY(onReadCode, RCTDirectEventBlock) -RCT_EXPORT_VIEW_PROPERTY(onOrientationChange, RCTDirectEventBlock) RCT_EXPORT_VIEW_PROPERTY(showFrame, BOOL) +RCT_EXPORT_VIEW_PROPERTY(scanThrottleDelay, NSInteger) RCT_EXPORT_VIEW_PROPERTY(laserColor, UIColor) RCT_EXPORT_VIEW_PROPERTY(frameColor, UIColor) + +RCT_EXPORT_VIEW_PROPERTY(onOrientationChange, RCTDirectEventBlock) RCT_EXPORT_VIEW_PROPERTY(resetFocusTimeout, NSInteger) RCT_EXPORT_VIEW_PROPERTY(resetFocusWhenMotionDetected, BOOL) +RCT_EXPORT_VIEW_PROPERTY(focusMode, CKFocusMode) +RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKZoomMode) -RCT_EXPORT_METHOD(capture:(NSDictionary*)options +RCT_EXTERN_METHOD(capture:(NSDictionary*)options resolve:(RCTPromiseResolveBlock)resolve - reject:(RCTPromiseRejectBlock)reject) { - - [self.camera snapStillImage:options success:^(NSDictionary *imageObject) { - resolve(imageObject); - } onError:^(NSString* error) { - reject(@"capture_error", error, nil); - }]; -} - -RCT_EXPORT_METHOD(setTorchMode:(NSString*)mode) { - AVCaptureTorchMode torchMode; - if([mode isEqualToString:@"on"]) { - torchMode = AVCaptureTorchModeOn; - } else { - torchMode = AVCaptureTorchModeOff; - } - - [self.camera setTorchMode:torchMode ]; -} - -RCT_EXPORT_METHOD(checkDeviceCameraAuthorizationStatus:(RCTPromiseResolveBlock)resolve - reject:(__unused RCTPromiseRejectBlock)reject) { - - - AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; - if(authStatus == 
AVAuthorizationStatusAuthorized) { - resolve(@YES); - } else if(authStatus == AVAuthorizationStatusNotDetermined) { - resolve(@(-1)); - } else { - resolve(@NO); - } -} + reject:(RCTPromiseRejectBlock)reject) -RCT_EXPORT_METHOD(requestDeviceCameraAuthorization:(RCTPromiseResolveBlock)resolve - reject:(__unused RCTPromiseRejectBlock)reject) { - __block NSString *mediaType = AVMediaTypeVideo; +RCT_EXTERN_METHOD(checkDeviceCameraAuthorizationStatus:(RCTPromiseResolveBlock)resolve + reject:(__unused RCTPromiseRejectBlock)reject) - [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) { - if (resolve) { - resolve(@(granted)); - } - }]; -} +RCT_EXTERN_METHOD(requestDeviceCameraAuthorization:(RCTPromiseResolveBlock)resolve + reject:(__unused RCTPromiseRejectBlock)reject) @end diff --git a/ios/ReactNativeCameraKit/CKCameraOverlayView.h b/ios/ReactNativeCameraKit/CKCameraOverlayView.h deleted file mode 100644 index 83a4e579bb..0000000000 --- a/ios/ReactNativeCameraKit/CKCameraOverlayView.h +++ /dev/null @@ -1,16 +0,0 @@ -#import -#import "CKOverlayObject.h" - -@interface CKCameraOverlayView : UIView - - -@property (nonatomic, strong, readonly) UIView *centerView; -@property (nonatomic, strong, readonly) CKOverlayObject *overlayObject; - - - --(instancetype)initWithFrame:(CGRect)frame ratioString:(NSString*)ratioString overlayColor:(UIColor*)overlayColor; - --(void)setRatio:(NSString*)ratio; - -@end diff --git a/ios/ReactNativeCameraKit/CKCameraOverlayView.m b/ios/ReactNativeCameraKit/CKCameraOverlayView.m deleted file mode 100644 index 2bf596836d..0000000000 --- a/ios/ReactNativeCameraKit/CKCameraOverlayView.m +++ /dev/null @@ -1,100 +0,0 @@ -#import "CKCameraOverlayView.h" - - -@interface CKCameraOverlayView () - -@property (nonatomic, strong, readwrite) CKOverlayObject *overlayObject; -@property (nonatomic, strong) UIView *topView; -@property (nonatomic, strong, readwrite) UIView *centerView; -@property (nonatomic, strong) UIView *bottomView; - - -@end - -@implementation CKCameraOverlayView - - - --(instancetype)initWithFrame:(CGRect)frame ratioString:(NSString*)ratioString overlayColor:(UIColor*)overlayColor { - - self = [super initWithFrame:frame]; - - if (self) { - - self.overlayObject = [[CKOverlayObject alloc] initWithString:ratioString]; - self.topView = [[UIView alloc] initWithFrame:CGRectZero]; - self.centerView = [[UIView alloc] initWithFrame:CGRectZero]; - self.bottomView = [[UIView alloc] initWithFrame:CGRectZero]; - - overlayColor = overlayColor ? 
overlayColor : [UIColor colorWithRed:0 green:0 blue:0 alpha:0.3]; - - self.topView.backgroundColor = overlayColor; - self.bottomView.backgroundColor = overlayColor; - - [self addSubview:self.topView]; - [self addSubview:self.centerView]; - [self addSubview:self.bottomView]; - - [self setOverlayParts]; - } - - return self; -} - - --(void)setOverlayParts { - - if (self.overlayObject.ratio == 0) return; - - CGSize centerSize = CGSizeZero; - CGSize sideSize = CGSizeZero; - - if (self.overlayObject.width < self.overlayObject.height) { - - centerSize.width = self.frame.size.width; - centerSize.height = self.frame.size.height * self.overlayObject.ratio; - - sideSize.width = centerSize.width; - sideSize.height = (self.frame.size.height - centerSize.height)/2.0; - - self.topView.frame = CGRectMake(0, 0, sideSize.width, sideSize.height); - self.centerView.frame = CGRectMake(0, self.topView.frame.size.height + self.topView.frame.origin.y, centerSize.width, centerSize.height); - self.bottomView.frame = CGRectMake(0, self.centerView.frame.size.height + self.centerView.frame.origin.y, sideSize.width, sideSize.height); - } - else if (self.overlayObject.width > self.overlayObject.height){ - centerSize.width = self.frame.size.width / self.overlayObject.ratio; - centerSize.height = self.frame.size.height; - - sideSize.width = (self.frame.size.width - centerSize.width)/2.0; - sideSize.height = centerSize.height; - - self.topView.frame = CGRectMake(0, 0, sideSize.width, sideSize.height); - self.centerView.frame = CGRectMake(self.topView.frame.size.width + self.topView.frame.origin.x, 0, centerSize.width, centerSize.height); - self.bottomView.frame = CGRectMake(self.centerView.frame.size.width + self.centerView.frame.origin.x, 0, sideSize.width, sideSize.height); - } - else { // ratio is 1:1 - centerSize.width = self.frame.size.width; - centerSize.height = self.frame.size.width; - - sideSize.width = centerSize.width; - sideSize.height = (self.frame.size.height - centerSize.height)/2.0; - - self.topView.frame = CGRectMake(0, 0, sideSize.width, sideSize.height); - self.centerView.frame = CGRectMake(0, self.topView.frame.size.height + self.topView.frame.origin.y, centerSize.width, centerSize.height); - self.bottomView.frame = CGRectMake(0, self.centerView.frame.size.height + self.centerView.frame.origin.y, sideSize.width, sideSize.height); - } -} - - --(void)setRatio:(NSString*)ratio { - self.overlayObject = [[CKOverlayObject alloc] initWithString:ratio]; - -// self.alpha =0; - [UIView animateWithDuration:0.2 animations:^{ - [self setOverlayParts]; - } completion:nil]; - -} - - -@end diff --git a/ios/ReactNativeCameraKit/CKCompressedImage.h b/ios/ReactNativeCameraKit/CKCompressedImage.h deleted file mode 100644 index 311fd453bd..0000000000 --- a/ios/ReactNativeCameraKit/CKCompressedImage.h +++ /dev/null @@ -1,10 +0,0 @@ -#import - -@interface CKCompressedImage : NSObject - -- (instancetype)initWithImage:(UIImage *)image imageQuality:(NSString*)imageQuality; - -@property (nonatomic, readonly) UIImage *image; -@property (nonatomic, readonly) NSData *data; - -@end diff --git a/ios/ReactNativeCameraKit/CKCompressedImage.m b/ios/ReactNativeCameraKit/CKCompressedImage.m deleted file mode 100644 index a453160887..0000000000 --- a/ios/ReactNativeCameraKit/CKCompressedImage.m +++ /dev/null @@ -1,40 +0,0 @@ -#import "CKCompressedImage.h" - -@implementation CKCompressedImage - -- (instancetype)initWithImage:(UIImage *)image imageQuality:(NSString*)imageQuality -{ - if(self = [super init]) { - CGFloat max = 1200.0f; - if 
([imageQuality isEqualToString:@"high"]) { - max = 1200.0f; - } - else if ([imageQuality isEqualToString:@"medium"]) { - max = 800.0f; - } - else { - _image = image; - _data = UIImageJPEGRepresentation(image, 1.0f); - } - float actualHeight = image.size.height; - float actualWidth = image.size.width; - - float imgRatio = actualWidth/actualHeight; - - float newHeight = (actualHeight > actualWidth) ? max : max/imgRatio; - float newWidth = (actualHeight > actualWidth) ? max*imgRatio : max; - - - CGRect rect = CGRectMake(0.0, 0.0, newWidth, newHeight); - UIGraphicsBeginImageContext(rect.size); - [image drawInRect:rect]; - _image = UIGraphicsGetImageFromCurrentImageContext(); - UIGraphicsEndImageContext(); - _data = UIImageJPEGRepresentation(_image, 0.85f); - } - - return self; -} - - -@end diff --git a/ios/ReactNativeCameraKit/CKMockPreview.h b/ios/ReactNativeCameraKit/CKMockPreview.h deleted file mode 100644 index c4591cb184..0000000000 --- a/ios/ReactNativeCameraKit/CKMockPreview.h +++ /dev/null @@ -1,19 +0,0 @@ -// -// CKMockPreview.h -// ReactNativeCameraKit -// -// Created by Aaron Grider on 10/20/20. -// - -#import - -NS_ASSUME_NONNULL_BEGIN - -@interface CKMockPreview : UIView - -- (UIImage *)snapshotWithTimestamp:(BOOL)showTimestamp; -- (void)randomize; - -@end - -NS_ASSUME_NONNULL_END diff --git a/ios/ReactNativeCameraKit/CKMockPreview.m b/ios/ReactNativeCameraKit/CKMockPreview.m deleted file mode 100644 index b38bb6efcf..0000000000 --- a/ios/ReactNativeCameraKit/CKMockPreview.m +++ /dev/null @@ -1,135 +0,0 @@ -// -// CKMockPreview.m -// ReactNativeCameraKit -// -// Created by Aaron Grider on 10/20/20. -// - -#import "CKMockPreview.h" - -@implementation CKMockPreview - -- (id)initWithFrame:(CGRect) frame { - self = [super initWithFrame:frame]; - if (self) { - [self commonInit]; - } - return self; -} - -- (void)commonInit { - self.layer.cornerRadius = 10.0f; - self.layer.masksToBounds = YES; -} - -- (void)layoutSubviews { - [super layoutSubviews]; - [self randomize]; -} - -- (void)randomize { - self.layer.backgroundColor = [UIColor colorWithHue:drand48() saturation:1.0 brightness:1.0 alpha:1.0].CGColor; - self.layer.sublayers = nil; - - for (int i = 0; i < 5; i++) { - [self drawBalloon]; - } -} - -- (void)drawBalloon { - int stringLength = 200; - CGFloat radius = [CKMockPreview randomNumberBetween:50 maxNumber:150]; - int x = arc4random_uniform(self.frame.size.width); - int y = arc4random_uniform(self.frame.size.height + radius + stringLength); - int stretch = radius / 3; - - CALayer *balloon = [CALayer layer]; - balloon.frame = CGRectMake(x - radius, y - radius, radius * 2, radius * 2 + stringLength); - - // Ballon main circle - CAShapeLayer *circle = [CAShapeLayer layer]; - double colorHue = drand48(); - - [circle setPath:[[UIBezierPath bezierPathWithOvalInRect:CGRectMake(0, 0, radius * 2, radius * 2 + stretch)] CGPath]]; - [circle setFillColor:[[UIColor colorWithHue:colorHue saturation:1.0 brightness:0.95 alpha:1.0] CGColor]]; - - // Ballon reflection - CAShapeLayer *reflection = [CAShapeLayer layer]; - [reflection setPath:[[UIBezierPath bezierPathWithOvalInRect:CGRectMake(radius / 2, radius / 2, radius * 0.7, radius * 0.7)] CGPath]]; - [reflection setFillColor:[[UIColor colorWithHue:colorHue saturation:1.0 brightness:1.0 alpha:1.0] CGColor]]; - - // Ballon string - CAShapeLayer *line = [CAShapeLayer layer]; - UIBezierPath *linePath= [UIBezierPath bezierPath]; - CGPoint startPoint = CGPointMake(balloon.frame.size.width / 2, radius * 2); - CGPoint endPoint = 
CGPointMake(balloon.frame.size.width, (radius * 2) + stringLength); - [linePath moveToPoint: startPoint]; - [linePath addQuadCurveToPoint:endPoint controlPoint:CGPointMake(balloon.frame.size.width / 2, radius * 2 + stringLength / 2)]; - line.path = linePath.CGPath; - line.fillColor = nil; - line.strokeColor = [UIColor darkGrayColor].CGColor; - line.opacity = 1.0; - line.lineWidth = radius * 0.05; - - // Add layers - [balloon addSublayer:line]; - [circle addSublayer:reflection]; - [balloon addSublayer:circle]; - - [self.layer addSublayer:balloon]; - - // Apply animation - CABasicAnimation *scale = [CABasicAnimation animationWithKeyPath:@"transform.scale"]; - [scale setFromValue:[NSNumber numberWithFloat:0.7f]]; - [scale setToValue:[NSNumber numberWithFloat:1.0f]]; - [scale setDuration:10.0f]; - [scale setFillMode:kCAFillModeForwards]; - - scale.removedOnCompletion = NO; - scale.autoreverses= YES; - scale.repeatCount = HUGE_VALF; - - CABasicAnimation *move = [CABasicAnimation animationWithKeyPath:@"position.y"]; - [move setFromValue:[NSNumber numberWithFloat:balloon.frame.origin.y]]; - [move setToValue:[NSNumber numberWithFloat: 0 - balloon.frame.size.height]]; - [move setDuration:[CKMockPreview randomNumberBetween:30 maxNumber:100]]; - - move.removedOnCompletion = NO; - move.repeatCount = HUGE_VALF; - - [balloon addAnimation:scale forKey:@"scale"]; - [balloon addAnimation:move forKey:@"move"]; -} - - -- (UIImage *)snapshotWithTimestamp:(BOOL)showTimestamp { - UIGraphicsBeginImageContextWithOptions(self.bounds.size, NO, 0); - [self drawViewHierarchyInRect:self.bounds afterScreenUpdates:NO]; - UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); - - if (showTimestamp) { - NSDate *date = [NSDate date]; - NSDateFormatter *dateformatter = [[NSDateFormatter alloc] init]; - [dateformatter setDateFormat:@"HH:mm:ss"]; - NSString *stringFromDate = [dateformatter stringFromDate:date]; - UIFont *font = [UIFont boldSystemFontOfSize:20]; - - [image drawInRect:CGRectMake(0,0,image.size.width,image.size.height)]; - CGRect rect = CGRectMake(25, 25, image.size.width, image.size.height); - [[UIColor whiteColor] set]; - [stringFromDate drawInRect:CGRectIntegral(rect) withAttributes:[NSDictionary dictionaryWithObject:font forKey:NSFontAttributeName]]; - - image = UIGraphicsGetImageFromCurrentImageContext(); - - UIGraphicsEndImageContext(); - } - - return image; -} - -+ (NSInteger)randomNumberBetween:(NSInteger)min maxNumber:(NSInteger)max -{ - return min + arc4random_uniform((uint32_t)(max - min + 1)); -} - -@end diff --git a/ios/ReactNativeCameraKit/CKOverlayObject.h b/ios/ReactNativeCameraKit/CKOverlayObject.h deleted file mode 100644 index e1f6bf9c6d..0000000000 --- a/ios/ReactNativeCameraKit/CKOverlayObject.h +++ /dev/null @@ -1,13 +0,0 @@ -#import - -@interface CKOverlayObject : NSObject - - -@property (nonatomic, readonly) float width; -@property (nonatomic, readonly) float height; -@property (nonatomic, readonly) float ratio; - --(instancetype)initWithString:(NSString*)str; - - -@end diff --git a/ios/ReactNativeCameraKit/CKOverlayObject.m b/ios/ReactNativeCameraKit/CKOverlayObject.m deleted file mode 100644 index 102db93eae..0000000000 --- a/ios/ReactNativeCameraKit/CKOverlayObject.m +++ /dev/null @@ -1,44 +0,0 @@ -#import "CKOverlayObject.h" - -@interface CKOverlayObject () - -@property (nonatomic, readwrite) float width; -@property (nonatomic, readwrite) float height; -@property (nonatomic, readwrite) float ratio; - -@end - -@implementation CKOverlayObject - 
--(instancetype)initWithString:(NSString*)str { - - self = [super init]; - - if (self) { - [self commonInit:str]; - } - - return self; -} - --(void)commonInit:(NSString*)str { - - NSArray *array = [str componentsSeparatedByString:@":"]; - if (array.count == 2) { - float height = [array[0] floatValue]; - float width = [array[1] floatValue]; - - if (width != 0 && height != 0) { - self.width = width; - self.height = height; - self.ratio = self.width/self.height; - } - } -} - --(NSString *)description { - return [NSString stringWithFormat:@"width:%f height:%f ratio:%f", self.width, self.height, self.ratio]; -} - - -@end diff --git a/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m b/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m new file mode 100644 index 0000000000..8156fa821f --- /dev/null +++ b/ios/ReactNativeCameraKit/CKTypes+RCTConvert.m @@ -0,0 +1,44 @@ +// +// CKTypes+RCTConvert.m +// ReactNativeCameraKit +// + +#if __has_include() +#import +#import +#else +#import "RCTViewManager.h" +#import "RCTConvert.h" +#endif + +#import "ReactNativeCameraKit-Swift.h" + +@implementation RCTConvert (CKTypes) + +RCT_ENUM_CONVERTER(CKCameraType, (@{ + @"back": @(CKCameraTypeBack), + @"front": @(CKCameraTypeFront) +}), CKCameraTypeBack, integerValue) + +RCT_ENUM_CONVERTER(CKFlashMode, (@{ + @"on": @(CKFlashModeOn), + @"off": @(CKFlashModeOff), + @"auto": @(CKFlashModeAuto) +}), CKFlashModeAuto, integerValue) + +RCT_ENUM_CONVERTER(CKTorchMode, (@{ + @"on": @(CKTorchModeOn), + @"off": @(CKTorchModeOff) +}), CKTorchModeOn, integerValue) + +RCT_ENUM_CONVERTER(CKFocusMode, (@{ + @"on": @(CKFocusModeOn), + @"off": @(CKFocusModeOff) +}), CKFocusModeOn, integerValue) + +RCT_ENUM_CONVERTER(CKZoomMode, (@{ + @"on": @(CKZoomModeOn), + @"off": @(CKZoomModeOff) +}), CKZoomModeOn, integerValue) + +@end diff --git a/ios/ReactNativeCameraKit/CameraManager.swift b/ios/ReactNativeCameraKit/CameraManager.swift new file mode 100644 index 0000000000..51c13c3f3f --- /dev/null +++ b/ios/ReactNativeCameraKit/CameraManager.swift @@ -0,0 +1,46 @@ +// +// CameraManager.swift +// ReactNativeCameraKit +// + +import AVFoundation +import Foundation + +/* + * Class managing the communication between React Native and the native implementation + */ +@objc(CKCameraManager) public class CameraManager: RCTViewManager { + var camera: CameraView! + + override public static func requiresMainQueueSetup() -> Bool { + return true + } + + override public func view() -> UIView! { + camera = CameraView() + + return camera + } + + @objc func capture(_ options: NSDictionary, + resolve: @escaping RCTPromiseResolveBlock, + reject: @escaping RCTPromiseRejectBlock) { + camera.capture(options as! 
[String: Any],
+                       onSuccess: { resolve($0) },
+                       onError: { reject("capture_error", $0, nil) })
+    }
+
+    @objc func checkDeviceCameraAuthorizationStatus(_ resolve: @escaping RCTPromiseResolveBlock,
+                                                    reject: @escaping RCTPromiseRejectBlock) {
+        switch AVCaptureDevice.authorizationStatus(for: .video) {
+        case .authorized: resolve(true)
+        case .notDetermined: resolve(-1)
+        default: resolve(false)
+        }
+    }
+
+    @objc func requestDeviceCameraAuthorization(_ resolve: @escaping RCTPromiseResolveBlock,
+                                                reject: @escaping RCTPromiseRejectBlock) {
+        AVCaptureDevice.requestAccess(for: .video, completionHandler: { resolve($0) })
+    }
+}
diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift
new file mode 100644
index 0000000000..b186964e36
--- /dev/null
+++ b/ios/ReactNativeCameraKit/CameraProtocol.swift
@@ -0,0 +1,28 @@
+//
+//  CameraProtocol.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+
+protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate {
+    var previewView: UIView { get }
+
+    func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType])
+    func cameraRemovedFromSuperview()
+
+    func update(pinchScale: CGFloat)
+    func update(torchMode: TorchMode)
+    func update(flashMode: FlashMode)
+    func update(cameraType: CameraType)
+    func update(onOrientationChange: RCTDirectEventBlock?)
+
+    func isBarcodeScannerEnabled(_ isEnabled: Bool,
+                                 supportedBarcodeType: [AVMetadataObject.ObjectType],
+                                 onBarcodeRead: ((_ barcode: String) -> Void)?)
+    func update(scannerFrameSize: CGRect?)
+
+    func capturePicture(onWillCapture: @escaping () -> Void,
+                        onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> (),
+                        onError: @escaping (_ message: String) -> ())
+}
diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift
new file mode 100644
index 0000000000..69e66454eb
--- /dev/null
+++ b/ios/ReactNativeCameraKit/CameraView.swift
@@ -0,0 +1,319 @@
+//
+//  CameraView.swift
+//  ReactNativeCameraKit
+//
+
+import AVFoundation
+import UIKit
+
+/*
+ * View abstracting the logic unrelated to the actual camera,
+ * like permissions, ratio overlay, focus, zoom gesture, writing images, etc.
+ */
+@objc(CKCameraView)
+class CameraView: UIView {
+    private let camera: CameraProtocol
+
+    // Focus
+    private let focusInterfaceView: FocusInterfaceView
+
+    // scanner
+    private var lastBarcodeDetectedTime: TimeInterval = 0
+    private var scannerInterfaceView: ScannerInterfaceView
+    private var supportedBarcodeType: [AVMetadataObject.ObjectType] = [.upce, .code39, .code39Mod43,
+                                                                       .ean13, .ean8, .code93,
+                                                                       .code128, .pdf417, .qr,
+                                                                       .aztec, .dataMatrix, .interleaved2of5]
+    // camera
+    private var ratioOverlayView: RatioOverlayView?
+
+    // gestures
+    private var zoomGestureRecognizer: UIPinchGestureRecognizer?
+
+    // props
+    // camera settings
+    @objc var cameraType: CameraType = .back
+    @objc var flashMode: FlashMode = .auto
+    @objc var torchMode: TorchMode = .off
+    // ratio overlay
+    @objc var ratioOverlay: String?
+    @objc var ratioOverlayColor: UIColor?
+    // scanner
+    @objc var scanBarcode = false
+    @objc var showFrame = false
+    @objc var onReadCode: RCTDirectEventBlock?
+    @objc var scanThrottleDelay = 2000
+    @objc var frameColor: UIColor?
+    @objc var laserColor: UIColor?
+    // other
+    @objc var onOrientationChange: RCTDirectEventBlock?
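+    // resetFocusTimeout is expressed in milliseconds; 0 disables the automatic focus reset (see FocusInterfaceView.focusFinished)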
+ @objc var resetFocusTimeout = 0 + @objc var resetFocusWhenMotionDetected = false + @objc var focusMode: FocusMode = .on + @objc var zoomMode: ZoomMode = .on + + // MARK: - Setup + + // This is used to delay camera setup until we have both granted permission & received default props + var hasCameraBeenSetup = false + var hasPropBeenSetup = false { + didSet { + setupCamera() + } + } + var hasPermissionBeenGranted = false { + didSet { + setupCamera() + } + } + + private func setupCamera() { + if (hasPropBeenSetup && hasPermissionBeenGranted && !hasCameraBeenSetup) { + hasCameraBeenSetup = true + camera.setup(cameraType: cameraType, supportedBarcodeType: scanBarcode && onReadCode != nil ? supportedBarcodeType : []) + } + } + + // MARK: Lifecycle + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override init(frame: CGRect) { +#if targetEnvironment(simulator) + camera = SimulatorCamera() +#else + camera = RealCamera() +#endif + + scannerInterfaceView = ScannerInterfaceView(frameColor: .white, laserColor: .red) + focusInterfaceView = FocusInterfaceView() + + super.init(frame: frame) + + addSubview(camera.previewView) + + addSubview(scannerInterfaceView) + scannerInterfaceView.isHidden = true + + addSubview(focusInterfaceView) + focusInterfaceView.delegate = camera + + handleCameraPermission() + } + + override func removeFromSuperview() { + camera.cameraRemovedFromSuperview() + + super.removeFromSuperview() + } + + // MARK: React lifecycle + + override func reactSetFrame(_ frame: CGRect) { + super.reactSetFrame(frame) + + camera.previewView.frame = bounds + + scannerInterfaceView.frame = bounds + // If frame size changes, we have to update the scanner + camera.update(scannerFrameSize: showFrame ? scannerInterfaceView.frameSize : nil) + + focusInterfaceView.frame = bounds + + ratioOverlayView?.frame = bounds + } + + override func removeReactSubview(_ subview: UIView) { + subview.removeFromSuperview() + super.removeReactSubview(subview) + } + + // Called once when all props have been set, then every time one is updated + override func didSetProps(_ changedProps: [String]) { + hasPropBeenSetup = true + + // Camera settings + if changedProps.contains("cameraType") { + camera.update(cameraType: cameraType) + } + if changedProps.contains("flashMode") { + camera.update(flashMode: flashMode) + } + if changedProps.contains("cameraType") || changedProps.contains("torchMode") { + camera.update(torchMode: torchMode) + } + + if changedProps.contains("onOrientationChange") { + camera.update(onOrientationChange: onOrientationChange) + } + + // Ratio overlay + if changedProps.contains("ratioOverlay") { + if let ratioOverlay { + if let ratioOverlayView { + ratioOverlayView.setRatio(ratioOverlay) + } else { + ratioOverlayView = RatioOverlayView(frame: bounds, ratioString: ratioOverlay, overlayColor: ratioOverlayColor) + addSubview(ratioOverlayView!) 
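+                    // force unwrap is safe here: ratioOverlayView was assigned on the previous line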
+ } + } else { + ratioOverlayView?.removeFromSuperview() + ratioOverlayView = nil + } + } + + if changedProps.contains("ratioOverlayColor"), let ratioOverlayColor { + ratioOverlayView?.setColor(ratioOverlayColor) + } + + // Scanner + if changedProps.contains("scanBarcode") || changedProps.contains("onReadCode") { + camera.isBarcodeScannerEnabled(scanBarcode, + supportedBarcodeType: supportedBarcodeType, + onBarcodeRead: { [weak self] barcode in self?.onBarcodeRead(barcode: barcode) }) + } + + if changedProps.contains("showFrame") || changedProps.contains("scanBarcode") { + DispatchQueue.main.async { + self.scannerInterfaceView.isHidden = !self.showFrame + + self.camera.update(scannerFrameSize: self.showFrame ? self.scannerInterfaceView.frameSize : nil) + } + } + + if changedProps.contains("laserColor"), let laserColor { + scannerInterfaceView.update(laserColor: laserColor) + } + + if changedProps.contains("frameColor"), let frameColor { + scannerInterfaceView.update(frameColor: frameColor) + } + + // Others + if changedProps.contains("focusMode") { + focusInterfaceView.update(focusMode: focusMode) + } + if changedProps.contains("resetFocusTimeout") { + focusInterfaceView.update(resetFocusTimeout: resetFocusTimeout) + } + if changedProps.contains("resetFocusWhenMotionDetected") { + focusInterfaceView.update(resetFocusWhenMotionDetected: resetFocusWhenMotionDetected) + } + + if changedProps.contains("zoomMode") { + if zoomMode == .on { + if (zoomGestureRecognizer == nil) { + let pinchGesture = UIPinchGestureRecognizer(target: self, action: #selector(handlePinchToZoomRecognizer(_:))) + addGestureRecognizer(pinchGesture) + zoomGestureRecognizer = pinchGesture + } + } else { + if let zoomGestureRecognizer { + removeGestureRecognizer(zoomGestureRecognizer) + self.zoomGestureRecognizer = nil + } + } + } + } + + // MARK: Public + + func capture(_ options: [String: Any], + onSuccess: @escaping (_ imageObject: [String: Any]) -> (), + onError: @escaping (_ error: String) -> ()) { + camera.capturePicture(onWillCapture: { [weak self] in + // Flash/dim preview to indicate shutter action + DispatchQueue.main.async { + self?.camera.previewView.alpha = 0 + UIView.animate(withDuration: 0.35, animations: { + self?.camera.previewView.alpha = 1 + }) + } + }, onSuccess: { [weak self] imageData, thumbnailData in + DispatchQueue.global(qos: .default).async { + self?.writeCaptured(imageData: imageData, thumbnailData: thumbnailData, onSuccess: onSuccess, onError: onError) + + self?.focusInterfaceView.resetFocus() + } + }, onError: onError) + } + + // MARK: - Private Helper + + private func handleCameraPermission() { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + // The user has previously granted access to the camera. + hasPermissionBeenGranted = true + break + case .notDetermined: + // The user has not yet been presented with the option to grant video access. + AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in + if granted { + self?.hasPermissionBeenGranted = true + } + } + default: + // The user has previously denied access. 
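+            // hasPermissionBeenGranted stays false, so setupCamera() will never configure the session for this view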
+            break
+        }
+    }
+
+    private func writeCaptured(imageData: Data,
+                               thumbnailData: Data?,
+                               onSuccess: @escaping (_ imageObject: [String: Any]) -> (),
+                               onError: @escaping (_ error: String) -> ()) {
+        do {
+            let temporaryImageFileURL = try saveToTmpFolder(imageData)
+            let temporaryThumbnailFileURL = try saveToTmpFolder(thumbnailData)
+
+            onSuccess([
+                "size": imageData.count,
+                "uri": temporaryImageFileURL?.description,
+                "name": temporaryImageFileURL?.lastPathComponent,
+                "thumb": temporaryThumbnailFileURL?.description ?? ""
+            ])
+        } catch {
+            let errorMessage = "Error occurred while writing image data to a temporary file: \(error)"
+            print(errorMessage)
+            onError(errorMessage)
+        }
+    }
+
+    private func saveToTmpFolder(_ data: Data?) throws -> URL? {
+        guard let data else { return nil }
+
+        let temporaryFileName = ProcessInfo.processInfo.globallyUniqueString
+        let temporaryFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent(temporaryFileName).appending(".jpg")
+        let temporaryFileURL = URL(fileURLWithPath: temporaryFilePath)
+
+        try data.write(to: temporaryFileURL, options: .atomic)
+
+        return temporaryFileURL
+    }
+
+    private func onBarcodeRead(barcode: String) {
+        // Throttle barcode detection
+        let now = Date.timeIntervalSinceReferenceDate
+        guard lastBarcodeDetectedTime + Double(scanThrottleDelay) / 1000 < now else {
+            return
+        }
+
+        lastBarcodeDetectedTime = now
+
+        onReadCode?(["codeStringValue": barcode])
+    }
+
+    // MARK: - Gesture selectors
+
+    @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) {
+        if pinchRecognizer.state == .changed {
+            camera.update(pinchScale: pinchRecognizer.scale)
+            // Reset the scale after every reading so each callback carries a single-timeframe delta.
+            // Otherwise pinchRecognizer.scale is relative to the start of the gesture
+            pinchRecognizer.scale = 1.0
+        }
+    }
+}
diff --git a/ios/ReactNativeCameraKit/FocusInterfaceView.swift b/ios/ReactNativeCameraKit/FocusInterfaceView.swift
new file mode 100644
index 0000000000..f873e8c4aa
--- /dev/null
+++ b/ios/ReactNativeCameraKit/FocusInterfaceView.swift
@@ -0,0 +1,211 @@
+//
+//  FocusInterfaceView.swift
+//  ReactNativeCameraKit
+//
+
+import UIKit
+import AVFoundation
+
+enum FocusBehavior {
+    case customFocus(resetFocusWhenMotionDetected: Bool, resetFocus: () -> Void, focusFinished: () -> Void)
+    case continuousAutoFocus
+
+    var isSubjectAreaChangeMonitoringEnabled: Bool {
+        switch self {
+        case let .customFocus(resetFocusWhenMotionDetected, _, _):
+            return resetFocusWhenMotionDetected
+        case .continuousAutoFocus:
+            return false
+        }
+    }
+
+    var avFocusMode: AVCaptureDevice.FocusMode {
+        switch self {
+        case .customFocus:
+            return .autoFocus
+        case .continuousAutoFocus:
+            return .continuousAutoFocus
+        }
+    }
+
+    var exposureMode: AVCaptureDevice.ExposureMode {
+        switch self {
+        case .customFocus:
+            return .autoExpose
+        case .continuousAutoFocus:
+            return .continuousAutoExposure
+        }
+    }
+}
+
+protocol FocusInterfaceViewDelegate: AnyObject {
+    func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior)
+}
+
+/*
+ * Full screen focus interface
+ */
+class FocusInterfaceView: UIView {
+    weak var delegate: FocusInterfaceViewDelegate?
+
+    private var resetFocusTimeout = 0
+    private var resetFocusWhenMotionDetected = false
+
+    private let focusView: UIView = UIView(frame: .zero)
+    private var hideFocusViewTimer: Timer?
+    private var focusResetTimer: Timer?
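+    // Set by the tap-to-focus gesture and consumed in focusFinished() to arm the focus-reset timer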
+ private var startFocusResetTimerAfterFocusing: Bool = false + private var tapToFocusEngaged: Bool = false + + private var focusGestureRecognizer: UITapGestureRecognizer? + + // MARK: - Lifecycle + + override init(frame: CGRect) { + super.init(frame: frame) + + focusView.backgroundColor = .clear + focusView.layer.borderColor = UIColor.yellow.cgColor + focusView.layer.borderWidth = 1 + focusView.isHidden = true + addSubview(focusView) + + isUserInteractionEnabled = true + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + // MARK: - Public + + func update(focusMode: FocusMode) { + if focusMode == .on { + if (focusGestureRecognizer == nil) { + let tapGesture = UITapGestureRecognizer(target: self, action: #selector(focusAndExposeTap(_:))) + addGestureRecognizer(tapGesture) + focusGestureRecognizer = tapGesture + } + } else { + if let focusGestureRecognizer { + removeGestureRecognizer(focusGestureRecognizer) + self.focusGestureRecognizer = nil + } + } + } + + func update(resetFocusTimeout: Int) { + self.resetFocusTimeout = resetFocusTimeout + } + + func update(resetFocusWhenMotionDetected: Bool) { + self.resetFocusWhenMotionDetected = resetFocusWhenMotionDetected + } + + func focusFinished() { + if startFocusResetTimerAfterFocusing, resetFocusTimeout > 0 { + startFocusResetTimerAfterFocusing = false + + // Disengage manual focus after focusTimeout milliseconds + let focusTimeoutSeconds = TimeInterval(self.resetFocusTimeout) / 1000 + focusResetTimer = Timer.scheduledTimer(withTimeInterval: focusTimeoutSeconds, + repeats: false) { [weak self] _ in + self?.resetFocus() + } + } + } + + func resetFocus() { + if let focusResetTimer { + focusResetTimer.invalidate() + self.focusResetTimer = nil + } + + // Resetting focus to continuous focus, so not interested in resetting anymore + startFocusResetTimerAfterFocusing = false + + // Avoid showing reset-focus animation after each photo capture + if !tapToFocusEngaged { + return + } + tapToFocusEngaged = false + + DispatchQueue.main.async { + let layerCenter = self.center + + // Reset current camera focus + self.delegate?.focus(at: layerCenter, focusBehavior: .continuousAutoFocus) + + // Create animation to indicate the new focus location + let halfDiagonal: CGFloat = 123 + let halfDiagonalAnimation = halfDiagonal * 2 + + let focusViewFrame = CGRect(x: layerCenter.x - (halfDiagonal / 2), + y: layerCenter.y - (halfDiagonal / 2), + width: halfDiagonal, + height: halfDiagonal) + let focusViewFrameForAnimation = CGRect(x: layerCenter.x - (halfDiagonalAnimation / 2), + y: layerCenter.y - (halfDiagonalAnimation / 2), + width: halfDiagonalAnimation, + height: halfDiagonalAnimation) + + self.focusView.alpha = 0 + self.focusView.isHidden = false + self.focusView.frame = focusViewFrameForAnimation + + UIView.animate(withDuration: 0.2, animations: { + self.focusView.frame = focusViewFrame + self.focusView.alpha = 1 + }) { _ in + self.hideFocusViewTimer?.invalidate() + self.hideFocusViewTimer = Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { [weak self] _ in + guard let self else { return } + UIView.animate(withDuration: 0.2, animations: { + self.focusView.alpha = 0 + }) { _ in + self.focusView.isHidden = true + } + } + } + } + } + + // MARK: - Gesture selectors + + @objc func focusAndExposeTap(_ gestureRecognizer: UIGestureRecognizer) { + let touchPoint = gestureRecognizer.location(in: self) + delegate?.focus(at: touchPoint, + focusBehavior: 
.customFocus(resetFocusWhenMotionDetected: resetFocusWhenMotionDetected,
+ resetFocus: resetFocus,
+ focusFinished: focusFinished))
+
+ // Disengage manual focus once focusing finishes (if resetFocusTimeout > 0)
+ // See focusFinished(), which arms focusResetTimer
+ focusResetTimer?.invalidate()
+ hideFocusViewTimer?.invalidate()
+ startFocusResetTimerAfterFocusing = true
+ tapToFocusEngaged = true
+
+ // Animate focus rectangle
+ let halfDiagonal: CGFloat = 73
+ let halfDiagonalAnimation = halfDiagonal * 2
+
+ let focusViewFrame = CGRect(x: touchPoint.x - (halfDiagonal / 2),
+ y: touchPoint.y - (halfDiagonal / 2),
+ width: halfDiagonal,
+ height: halfDiagonal)
+
+ focusView.alpha = 0
+ focusView.isHidden = false
+ focusView.frame = CGRect(x: touchPoint.x - (halfDiagonalAnimation / 2),
+ y: touchPoint.y - (halfDiagonalAnimation / 2),
+ width: halfDiagonalAnimation,
+ height: halfDiagonalAnimation)
+
+ UIView.animate(withDuration: 0.2, animations: {
+ self.focusView.frame = focusViewFrame
+ self.focusView.alpha = 1
+ })
+ }
+}
diff --git a/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift
new file mode 100644
index 0000000000..42b4a6eeab
--- /dev/null
+++ b/ios/ReactNativeCameraKit/PhotoCaptureDelegate.swift
@@ -0,0 +1,55 @@
+//
+// PhotoCaptureDelegate.swift
+// ReactNativeCameraKit
+//
+
+import AVFoundation
+
+/*
+ * AVCapturePhotoOutput uses a delegation pattern; this class wraps it in a more convenient closure-based API.
+ */
+class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
+ private(set) var requestedPhotoSettings: AVCapturePhotoSettings
+
+ private let onWillCapture: () -> Void
+ private let onCaptureSuccess: (_ uniqueID: Int64, _ imageData: Data, _ thumbnailData: Data?) -> Void
+ private let onCaptureError: (_ uniqueID: Int64, _ message: String) -> Void
+
+ init(with requestedPhotoSettings: AVCapturePhotoSettings,
+ onWillCapture: @escaping () -> Void,
+ onCaptureSuccess: @escaping (_ uniqueID: Int64, _ imageData: Data, _ thumbnailData: Data?) -> Void,
+ onCaptureError: @escaping (_ uniqueID: Int64, _ errorMessage: String) -> Void) {
+ self.requestedPhotoSettings = requestedPhotoSettings
+ self.onWillCapture = onWillCapture
+ self.onCaptureSuccess = onCaptureSuccess
+ self.onCaptureError = onCaptureError
+ }
+
+ // MARK: - AVCapturePhotoCaptureDelegate
+
+ func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
+ onWillCapture()
+ }
+
+ func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
+ if let error = error {
+ print("Could not capture still image: \(error)")
+ onCaptureError(requestedPhotoSettings.uniqueID, "Could not capture still image")
+ return
+ }
+
+ guard let imageData = photo.fileDataRepresentation() else {
+ onCaptureError(requestedPhotoSettings.uniqueID, "Could not capture still image")
+ return
+ }
+
+ var thumbnailData: Data?
= nil
+ if let previewPixelBuffer = photo.previewPixelBuffer {
+ let ciImage = CIImage(cvPixelBuffer: previewPixelBuffer)
+ let uiImage = UIImage(ciImage: ciImage)
+ thumbnailData = uiImage.jpegData(compressionQuality: 0.7)
+ }
+
+ onCaptureSuccess(requestedPhotoSettings.uniqueID, imageData, thumbnailData)
+ }
+}
diff --git a/ios/ReactNativeCameraKit/RatioOverlayView.swift b/ios/ReactNativeCameraKit/RatioOverlayView.swift
new file mode 100644
index 0000000000..f65cc7a414
--- /dev/null
+++ b/ios/ReactNativeCameraKit/RatioOverlayView.swift
@@ -0,0 +1,163 @@
+//
+// RatioOverlayView.swift
+// ReactNativeCameraKit
+//
+
+import UIKit
+
+struct RatioOverlayData: CustomStringConvertible {
+ let width: Float
+ let height: Float
+ let ratio: Float
+
+ init(from inputString: String) {
+ let values = inputString.split(separator: ":")
+
+ if values.count == 2,
+ let inputHeight = Float(values[0]),
+ let inputWidth = Float(values[1]),
+ inputHeight != 0,
+ inputWidth != 0 {
+ height = inputHeight
+ width = inputWidth
+ ratio = width / height
+ } else {
+ height = 0
+ width = 0
+ ratio = 0
+ }
+ }
+
+ // MARK: CustomStringConvertible
+
+ var description: String {
+ return "height:\(height) width:\(width) ratio:\(ratio)"
+ }
+}
+
+/*
+ * Full screen overlay that can appear on top of the camera as a hint for the expected ratio
+ */
+class RatioOverlayView: UIView {
+ private var ratioData: RatioOverlayData?
+
+ private let topView: UIView = UIView()
+ private let bottomView: UIView = UIView()
+
+ // MARK: - Lifecycle
+
+ init(frame: CGRect, ratioString: String, overlayColor: UIColor?) {
+ super.init(frame: frame)
+
+ isUserInteractionEnabled = false
+
+ let color = overlayColor ?? UIColor.black.withAlphaComponent(0.3)
+ setColor(color)
+
+ addSubview(topView)
+ addSubview(bottomView)
+
+ setRatio(ratioString)
+ }
+
+ @available(*, unavailable)
+ required init?(coder aDecoder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ override func layoutSubviews() {
+ super.layoutSubviews()
+
+ setOverlayParts()
+ }
+
+ // MARK: - Public
+
+ func setRatio(_ ratioString: String) {
+ ratioData = RatioOverlayData(from: ratioString)
+
+ UIView.animate(withDuration: 0.2) {
+ self.setOverlayParts()
+ }
+ }
+
+ func setColor(_ color: UIColor) {
+ topView.backgroundColor = color
+ bottomView.backgroundColor = color
+ }
+
+ // MARK: - Private
+
+ private func setOverlayParts() {
+ guard let ratioData, ratioData.ratio != 0 else {
+ isHidden = true
+
+ return
+ }
+
+ isHidden = false
+
+ var centerSize = CGSize.zero
+ var sideSize = CGSize.zero
+ var centerFrame: CGRect
+
+ if ratioData.width < ratioData.height {
+ centerSize.width = frame.size.width
+ centerSize.height = frame.size.height * CGFloat(ratioData.ratio)
+
+ sideSize.width = centerSize.width
+ sideSize.height = (frame.size.height - centerSize.height) / 2.0
+
+ topView.frame = CGRect(x: 0,
+ y: 0,
+ width: sideSize.width,
+ height: sideSize.height)
+ centerFrame = CGRect(x: 0,
+ y: topView.frame.size.height + topView.frame.origin.y,
+ width: centerSize.width,
+ height: centerSize.height)
+ bottomView.frame = CGRect(x: 0,
+ y: centerFrame.size.height + centerFrame.origin.y,
+ width: sideSize.width,
+ height: sideSize.height)
+ } else if ratioData.width > ratioData.height {
+ centerSize.width = frame.size.width / CGFloat(ratioData.ratio)
+ centerSize.height = frame.size.height
+
+ sideSize.width = (frame.size.width - centerSize.width) / 2.0
+ sideSize.height = centerSize.height
+
+ topView.frame = CGRect(x: 0,
+ y: 0,
+ width:
sideSize.width, + height: sideSize.height) + centerFrame = CGRect(x: topView.frame.size.width + topView.frame.origin.x, + y: 0, + width: centerSize.width, + height: centerSize.height) + bottomView.frame = CGRect(x: centerFrame.size.width + centerFrame.origin.x, + y: 0, + width: sideSize.width, + height: sideSize.height) + } else { // ratio is 1:1 + centerSize.width = frame.size.width + centerSize.height = frame.size.width + + sideSize.width = centerSize.width + sideSize.height = (frame.size.height - centerSize.height) / 2.0 + + topView.frame = CGRect(x: 0, + y: 0, + width: sideSize.width, + height: sideSize.height) + centerFrame = CGRect(x: 0, + y: topView.frame.size.height + topView.frame.origin.y, + width: centerSize.width, + height: centerSize.height) + bottomView.frame = CGRect(x: 0, + y: centerFrame.size.height + centerFrame.origin.y, + width: sideSize.width, + height: sideSize.height) + } + } +} diff --git a/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h b/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h new file mode 100644 index 0000000000..a80a2d3fc1 --- /dev/null +++ b/ios/ReactNativeCameraKit/ReactNativeCameraKit-Bridging-Header.h @@ -0,0 +1,17 @@ +// +// Use this file to import your target's public headers that you would like to expose to Swift. +// + +#if __has_include() +#import +#import +#import +#import +#import +#else +#import "RCTBridgeModule.h" +#import "RCTViewManager.h" +#import "RCTConvert.h" +#import "RCTEventEmitter.h" +#import "UIView+React.h" +#endif diff --git a/ios/ReactNativeCameraKit/ReactNativeCameraKit.h b/ios/ReactNativeCameraKit/ReactNativeCameraKit.h new file mode 100644 index 0000000000..65c49f3270 --- /dev/null +++ b/ios/ReactNativeCameraKit/ReactNativeCameraKit.h @@ -0,0 +1,10 @@ +// +// ReactNativeCameraKit.h +// ReactNativeCameraKit +// + +#ifndef ReactNativeCameraKit_h +#define ReactNativeCameraKit_h + + +#endif /* ReactNativeCameraKit_h */ diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift new file mode 100644 index 0000000000..e76a9c076a --- /dev/null +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -0,0 +1,592 @@ +// +// RealCamera.swift +// ReactNativeCameraKit +// + +import AVFoundation +import UIKit +import CoreMotion + +/* + * Real camera implementation that uses AVFoundation + */ +class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate { + var previewView: UIView { cameraPreview } + + private let cameraPreview = RealPreviewView(frame: .zero) + private let session = AVCaptureSession() + // Communicate with the session and other session objects on this queue. + private let sessionQueue = DispatchQueue(label: "session queue") + + // utilities + private var setupResult: SetupResult = .notStarted + private var isSessionRunning: Bool = false + private var backgroundRecordingId: UIBackgroundTaskIdentifier = .invalid + + private var videoDeviceInput: AVCaptureDeviceInput? + private let photoOutput = AVCapturePhotoOutput() + private let metadataOutput = AVCaptureMetadataOutput() + + private var flashMode: FlashMode = .auto + private var torchMode: TorchMode = .off + private var resetFocus: (() -> Void)? + private var focusFinished: (() -> Void)? + private var onBarcodeRead: ((_ barcode: String) -> Void)? + private var scannerFrameSize: CGRect? = nil + private var onOrientationChange: RCTDirectEventBlock? + + private var deviceOrientation = UIDeviceOrientation.unknown + private var motionManager: CMMotionManager? 
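+
+ // Orientation is tracked with CoreMotion accelerometer samples rather than UIDevice.orientation,
+ // so captures stay correctly rotated even when the user enables orientation lock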
+ + // KVO observation + private var adjustingFocusObservation: NSKeyValueObservation? + + // Keep delegate objects in memory to avoid collecting them before photo capturing finishes + private var inProgressPhotoCaptureDelegates = [Int64: PhotoCaptureDelegate]() + + // MARK: - Lifecycle + + override init() { + super.init() + + // In addition to using accelerometer to determine REAL orientation + // we also listen to UI orientation changes (UIDevice does not report rotation if orientation lock is on, so photos aren't rotated correctly) + // When UIDevice reports rotation to the left, UI is rotated right to compensate, but that means we need to re-rotate left to make camera appear correctly (see self.uiOrientationChanged) + UIDevice.current.beginGeneratingDeviceOrientationNotifications() + NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, + object: UIDevice.current, + queue: nil, + using: { [weak self] notification in self?.uiOrientationChanged(notification: notification) }) + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func cameraRemovedFromSuperview() { + sessionQueue.async { + if self.setupResult == .success { + self.session.stopRunning() + self.removeObservers() + } + } + + motionManager?.stopAccelerometerUpdates() + + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) + + UIDevice.current.endGeneratingDeviceOrientationNotifications() + } + + deinit { + removeObservers() + } + + // MARK: - Public + + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { + DispatchQueue.main.async { + self.cameraPreview.session = self.session + self.cameraPreview.previewLayer.videoGravity = .resizeAspect + var interfaceOrientation: UIInterfaceOrientation + if #available(iOS 13.0, *) { + interfaceOrientation = self.previewView.window!.windowScene!.interfaceOrientation + } else { + interfaceOrientation = UIApplication.shared.statusBarOrientation + } + self.cameraPreview.previewLayer.connection?.videoOrientation = self.videoOrientation(from: interfaceOrientation) + } + + self.initializeMotionManager() + + // Setup the capture session. + // In general, it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time. + // Why not do all of this on the main queue? + // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue + // so that the main queue isn't blocked, which keeps the UI responsive. 
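+ // setupResult is only written and read on sessionQueue, so no extra synchronization is needed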
+ sessionQueue.async {
+ self.setupResult = self.setupCaptureSession(cameraType: cameraType, supportedBarcodeType: supportedBarcodeType)
+
+ self.addObservers()
+
+ if self.setupResult == .success {
+ self.session.startRunning()
+
+ // We need to reapply the configuration after starting the camera
+ self.update(torchMode: self.torchMode)
+ }
+ }
+ }
+
+ func update(pinchScale: CGFloat) {
+ guard !pinchScale.isNaN else { return }
+
+ sessionQueue.async {
+ guard let videoDevice = self.videoDeviceInput?.device else { return }
+
+ do {
+ try videoDevice.lockForConfiguration()
+
+ let desiredZoomFactor = videoDevice.videoZoomFactor * pinchScale
+ let maxZoomFactor = min(20, videoDevice.maxAvailableVideoZoomFactor)
+ videoDevice.videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor))
+
+ videoDevice.unlockForConfiguration()
+ } catch {
+ print("Error setting zoom factor: \(error)")
+ }
+ }
+ }
+
+ func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior) {
+ DispatchQueue.main.async {
+ let devicePoint = self.cameraPreview.previewLayer.captureDevicePointConverted(fromLayerPoint: touchPoint)
+
+ self.sessionQueue.async {
+ guard let videoDevice = self.videoDeviceInput?.device else { return }
+
+ if case let .customFocus(_, resetFocus, focusFinished) = focusBehavior {
+ self.resetFocus = resetFocus
+ self.focusFinished = focusFinished
+ } else {
+ self.resetFocus = nil
+ self.focusFinished = nil
+ }
+
+ do {
+ try videoDevice.lockForConfiguration()
+
+ if videoDevice.isFocusPointOfInterestSupported && videoDevice.isFocusModeSupported(focusBehavior.avFocusMode) {
+ videoDevice.focusPointOfInterest = devicePoint
+ videoDevice.focusMode = focusBehavior.avFocusMode
+ }
+
+ if videoDevice.isExposurePointOfInterestSupported && videoDevice.isExposureModeSupported(focusBehavior.exposureMode) {
+ videoDevice.exposurePointOfInterest = devicePoint
+ videoDevice.exposureMode = focusBehavior.exposureMode
+ }
+
+ videoDevice.isSubjectAreaChangeMonitoringEnabled = focusBehavior.isSubjectAreaChangeMonitoringEnabled
+
+ videoDevice.unlockForConfiguration()
+ } catch {
+ print("Error setting focus: \(error)")
+ }
+ }
+ }
+ }
+
+ func update(onOrientationChange: RCTDirectEventBlock?) {
+ self.onOrientationChange = onOrientationChange
+ }
+
+ func update(torchMode: TorchMode) {
+ self.torchMode = torchMode
+
+ sessionQueue.async {
+ guard let videoDevice = self.videoDeviceInput?.device, videoDevice.torchMode != torchMode.avTorchMode else { return }
+
+ if videoDevice.isTorchModeSupported(torchMode.avTorchMode) && videoDevice.hasTorch {
+ do {
+ try videoDevice.lockForConfiguration()
+
+ videoDevice.torchMode = torchMode.avTorchMode
+
+ videoDevice.unlockForConfiguration()
+ } catch {
+ print("Error setting torch mode: \(error)")
+ }
+ }
+ }
+ }
+
+ func update(flashMode: FlashMode) {
+ self.flashMode = flashMode
+ }
+
+ func update(cameraType: CameraType) {
+ sessionQueue.async {
+ if self.videoDeviceInput?.device.position == cameraType.avPosition {
+ return
+ }
+
+ // Avoid chaining device inputs when camera input is denied by the user, since both front and rear video input devices will be nil
+ guard self.setupResult == .success,
+ let currentViewDeviceInput = self.videoDeviceInput,
+ let videoDevice = self.getBestDevice(for: cameraType),
+ let videoDeviceInput = try?
AVCaptureDeviceInput(device: videoDevice) else { + return + } + + self.removeObservers() + self.session.beginConfiguration() + + // Remove the existing device input first, since using the front and back camera simultaneously is not supported. + self.session.removeInput(currentViewDeviceInput) + + if self.session.canAddInput(videoDeviceInput) { + self.session.addInput(videoDeviceInput) + videoDevice.videoZoomFactor = self.wideAngleZoomFactor(for: videoDevice) + self.videoDeviceInput = videoDeviceInput + } else { + // If it fails, put back current camera + self.session.addInput(currentViewDeviceInput) + } + + self.session.commitConfiguration() + self.addObservers() + + // We need to reapply the configuration after reloading the camera + self.update(torchMode: self.torchMode) + } + } + + func capturePicture(onWillCapture: @escaping () -> Void, + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) -> Void, + onError: @escaping (_ message: String) -> Void) { + /* + Retrieve the video preview layer's video orientation on the main queue before + entering the session queue. Do this to ensure that UI elements are accessed on + the main thread and session configuration is done on the session queue. + */ + DispatchQueue.main.async { + let videoPreviewLayerOrientation = self.videoOrientation(from: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation + + self.sessionQueue.async { + if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { + photoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } + + let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]) + settings.isAutoStillImageStabilizationEnabled = true + + if self.videoDeviceInput?.device.isFlashAvailable == true { + settings.flashMode = self.flashMode.avFlashMode + } + + let photoCaptureDelegate = PhotoCaptureDelegate( + with: settings, + onWillCapture: onWillCapture, + onCaptureSuccess: { uniqueID, imageData, thumbnailData in + self.inProgressPhotoCaptureDelegates[uniqueID] = nil + + onSuccess(imageData, thumbnailData) + }, + onCaptureError: { uniqueID, errorMessage in + self.inProgressPhotoCaptureDelegates[uniqueID] = nil + onError(errorMessage) + } + ) + + self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate + self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate) + } + } + } + + func isBarcodeScannerEnabled(_ isEnabled: Bool, + supportedBarcodeType: [AVMetadataObject.ObjectType], + onBarcodeRead: ((_ barcode: String) -> Void)?) { + self.onBarcodeRead = onBarcodeRead + + sessionQueue.async { + let newTypes: [AVMetadataObject.ObjectType] + if isEnabled && onBarcodeRead != nil { + let availableTypes = self.metadataOutput.availableMetadataObjectTypes + newTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) } + } else { + newTypes = [] + } + + if self.metadataOutput.metadataObjectTypes != newTypes { + self.metadataOutput.metadataObjectTypes = newTypes + + // Setting metadataObjectTypes reloads the camera, we need to reapply the configuration + self.update(torchMode: self.torchMode) + } + } + } + + func update(scannerFrameSize: CGRect?) 
{ + guard self.scannerFrameSize != scannerFrameSize else { return } + + self.scannerFrameSize = scannerFrameSize + + self.sessionQueue.async { + if !self.session.isRunning { + return + } + + DispatchQueue.main.async { + let visibleRect = scannerFrameSize != nil && scannerFrameSize != .zero ? self.cameraPreview.previewLayer.metadataOutputRectConverted(fromLayerRect: scannerFrameSize!) : nil + + self.sessionQueue.async { + if (self.metadataOutput.rectOfInterest == visibleRect) { + return + } + + self.metadataOutput.rectOfInterest = visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1) + // We need to reapply the configuration after touching the metadataOutput + self.update(torchMode: self.torchMode) + } + } + } + } + + // MARK: - AVCaptureMetadataOutputObjectsDelegate + + func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) { + // Try to retrieve the barcode from the metadata extracted + guard let machineReadableCodeObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject, + let codeStringValue = machineReadableCodeObject.stringValue else { + return + } + + onBarcodeRead?(codeStringValue) + } + + // MARK: - Private + + private func videoOrientation(from deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? { + // Device orientation counter-rotate interface when in landscapeLeft/Right so it appears level + // (note how landscapeLeft sets landscapeRight) + switch deviceOrientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + case .faceUp, .faceDown, .unknown: return nil + @unknown default: return nil + } + } + + private func videoOrientation(from interfaceOrientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation { + switch interfaceOrientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + case .unknown: return .portrait + @unknown default: return .portrait + } + } + + private func getBestDevice(for cameraType: CameraType) -> AVCaptureDevice? { + if #available(iOS 13.0, *) { + if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: cameraType.avPosition) { + return device + } + } + if let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: cameraType.avPosition) { + return device + } + if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraType.avPosition) { + return device + } + return nil + } + + private func setupCaptureSession(cameraType: CameraType, + supportedBarcodeType: [AVMetadataObject.ObjectType]) -> SetupResult { + guard let videoDevice = self.getBestDevice(for: cameraType), + let videoDeviceInput = try? 
AVCaptureDeviceInput(device: videoDevice) else {
+ return .sessionConfigurationFailed
+ }
+
+ session.beginConfiguration()
+ // Commit on every exit path, including the early failure returns below
+ defer { session.commitConfiguration() }
+
+ session.sessionPreset = .photo
+
+ if session.canAddInput(videoDeviceInput) {
+ session.addInput(videoDeviceInput)
+ videoDevice.videoZoomFactor = wideAngleZoomFactor(for: videoDevice)
+ self.videoDeviceInput = videoDeviceInput
+ } else {
+ return .sessionConfigurationFailed
+ }
+
+ if session.canAddOutput(photoOutput) {
+ session.addOutput(photoOutput)
+
+ if let photoOutputConnection = self.photoOutput.connection(with: .video) {
+ if photoOutputConnection.isVideoStabilizationSupported {
+ photoOutputConnection.preferredVideoStabilizationMode = .auto
+ }
+ }
+ } else {
+ return .sessionConfigurationFailed
+ }
+
+ if self.session.canAddOutput(metadataOutput) {
+ self.session.addOutput(metadataOutput)
+ metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
+
+ let availableTypes = self.metadataOutput.availableMetadataObjectTypes
+ let filteredTypes = supportedBarcodeType.filter { type in availableTypes.contains(type) }
+ metadataOutput.metadataObjectTypes = filteredTypes
+ }
+
+ return .success
+ }
+
+ private func wideAngleZoomFactor(for videoDevice: AVCaptureDevice) -> CGFloat {
+ // Devices that have multiple physical cameras are bound behind one virtual camera input. The zoom factor defines which physical camera is actually used
+ // Find the 'normal' zoom factor, which on the physical camera defaults to the wide angle
+ if #available(iOS 13.0, *) {
+ if let indexOfWideAngle = videoDevice.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }),
+ indexOfWideAngle > 0 {
+ // .virtualDeviceSwitchOverVideoZoomFactors has the .constituentDevices zoom factor which borders the NEXT device
+ // so we grab the one PRIOR to the wide angle to get the wide angle's zoom factor.
+ // When the wide angle is the first constituent there is no prior entry, and the default of 1.0 is already correct
+ return videoDevice.virtualDeviceSwitchOverVideoZoomFactors[indexOfWideAngle - 1].doubleValue
+ }
+ }
+
+ return 1.0
+ }
+
+ // MARK: - Private device orientation from accelerometer
+
+ private func initializeMotionManager() {
+ motionManager = CMMotionManager()
+ motionManager?.accelerometerUpdateInterval = 0.2
+ motionManager?.gyroUpdateInterval = 0.2
+ motionManager?.startAccelerometerUpdates(to: OperationQueue(), withHandler: { [weak self] (accelerometerData, error) -> Void in
+ guard error == nil else {
+ print("\(error!)")
+ return
+ }
+ guard let accelerometerData else {
+ print("no acceleration data")
+ return
+ }
+
+ guard let newOrientation = self?.deviceOrientation(from: accelerometerData.acceleration),
+ newOrientation != self?.deviceOrientation else {
+ return
+ }
+
+ self?.deviceOrientation = newOrientation
+ self?.onOrientationChange?(["orientation": Orientation.init(from: newOrientation)!.rawValue])
+ })
+ }
+
+ private func deviceOrientation(from acceleration: CMAcceleration) -> UIDeviceOrientation? {
+ let threshold = 0.75
+ if acceleration.x >= threshold {
+ return .landscapeRight
+ } else if acceleration.x <= -threshold {
+ return .landscapeLeft
+ } else if acceleration.y <= -threshold {
+ return .portrait
+ } else if acceleration.y >= threshold {
+ return .portraitUpsideDown
+ } else {
+ // Device is not clearly pointing in either direction
+ // (e.g.
it's flat on the table, so stick with the same orientation) + return nil + } + } + + // MARK: Private observers + + private func addObservers() { + guard adjustingFocusObservation == nil else { return } + + adjustingFocusObservation = videoDeviceInput?.device.observe(\.isAdjustingFocus, + options: .new, + changeHandler: { [weak self] device, change in + guard let self, let isFocusing = change.newValue else { return } + + self.isAdjustingFocus(isFocusing: isFocusing) + }) + + NotificationCenter.default.addObserver(forName: .AVCaptureDeviceSubjectAreaDidChange, + object: videoDeviceInput?.device, + queue: nil, + using: { [weak self] notification in self?.subjectAreaDidChange(notification: notification) }) + NotificationCenter.default.addObserver(forName: .AVCaptureSessionRuntimeError, + object: session, + queue: nil, + using: { [weak self] notification in self?.sessionRuntimeError(notification: notification) }) + NotificationCenter.default.addObserver(forName: .AVCaptureSessionWasInterrupted, + object: session, + queue: nil, + using: { [weak self] notification in self?.sessionWasInterrupted(notification: notification) }) + + } + + private func removeObservers() { + NotificationCenter.default.removeObserver(self) + + adjustingFocusObservation?.invalidate() + adjustingFocusObservation = nil + } + + private func isAdjustingFocus(isFocusing: Bool) { + if !isFocusing { + focusFinished?() + } + } + + private func subjectAreaDidChange(notification: Notification) { + resetFocus?() + } + + private func uiOrientationChanged(notification: Notification) { + guard let device = notification.object as? UIDevice, + let videoOrientation = videoOrientation(from: device.orientation) else { + return + } + + self.cameraPreview.previewLayer.connection?.videoOrientation = videoOrientation + } + + private func sessionRuntimeError(notification: Notification) { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { return } + + print("Capture session runtime error: \(error)") + + // Automatically try to restart the session running if media services were reset and the last start running succeeded. + if error.code == .mediaServicesWereReset { + sessionQueue.async { + if self.isSessionRunning { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + } + } + } + // Otherwise, enable the user to try to resume the session running. + // FIXME: Missing showResumeButton + } + + private func sessionWasInterrupted(notification: Notification) { + // In some scenarios we want to enable the user to resume the session running. + // For example, if music playback is initiated via control center while using AVCam, + // then the user can let AVCam resume the session running, which will stop music playback. + // Note that stopping music playback in control center will not automatically resume the session running. + // Also note that it is not always possible to resume, see -[resumeInterruptedSession:]. + var showResumeButton = false + + if let reasonValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? 
Int, + let reason = AVCaptureSession.InterruptionReason(rawValue: reasonValue) { + print("Capture session was interrupted with reason \(reason)") + + if reason == .audioDeviceInUseByAnotherClient || reason == .videoDeviceInUseByAnotherClient { + showResumeButton = true + } + } + + // FIXME: Missing use of showResumeButton + } +} diff --git a/ios/ReactNativeCameraKit/RealPreviewView.swift b/ios/ReactNativeCameraKit/RealPreviewView.swift new file mode 100644 index 0000000000..56c9e96c9f --- /dev/null +++ b/ios/ReactNativeCameraKit/RealPreviewView.swift @@ -0,0 +1,25 @@ +// +// RealPreviewView.swift +// ReactNativeCameraKit +// + +import AVFoundation + +class RealPreviewView: UIView { + // Use AVCaptureVideoPreviewLayer as the view's backing layer. + override class var layerClass: AnyClass { + AVCaptureVideoPreviewLayer.self + } + + // Create an accessor for the right layer type + var previewLayer: AVCaptureVideoPreviewLayer { + // We can safely forcecast here, it can't change at runtime + return layer as! AVCaptureVideoPreviewLayer + } + + // Connect the layer to a capture session. + var session: AVCaptureSession? { + get { previewLayer.session } + set { previewLayer.session = newValue } + } +} diff --git a/ios/ReactNativeCameraKit/ScannerFrameView.swift b/ios/ReactNativeCameraKit/ScannerFrameView.swift new file mode 100644 index 0000000000..381da4f8a7 --- /dev/null +++ b/ios/ReactNativeCameraKit/ScannerFrameView.swift @@ -0,0 +1,99 @@ +// +// ScannerFrame.swift +// ReactNativeCameraKit +// + +import UIKit + +/* + * Frame for the barcode scanner + */ +class ScannerFrameView: UIView { + private let laserView = UIView() + private let frameViews: [UIView] = (0..<8).map { _ in UIView() } + + // MARK: - Lifecycle + + init(frameColor: UIColor, laserColor: UIColor) { + super.init(frame: .zero) + + laserView.backgroundColor = laserColor + addSubview(laserView) + + frameViews.forEach { + $0.backgroundColor = frameColor + addSubview($0) + } + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func draw(_ rect: CGRect) { + super.draw(rect) + + frameViews.enumerated().forEach { (index, view) in + view.frame = sizeForFramePart(at: index) + } + + startAnimatingScanner() + } + + // MARK: - Public + + func startAnimatingScanner() { + if laserView.frame.origin.y != 0 { + laserView.frame = CGRect(x: 2, y: 2, width: frame.size.width - 4, height: 2) + } + + + UIView.animate(withDuration: 3, delay: 0, options: [.autoreverse, .repeat], animations: { + self.laserView.center = CGPoint(x: self.frame.size.width / 2, y: self.frame.size.height - 3) + }) + } + + func stopAnimatingScanner() { + laserView.removeFromSuperview() + } + + func update(frameColor: UIColor) { + frameViews.forEach { $0.backgroundColor = frameColor } + } + + func update(laserColor: UIColor) { + laserView.backgroundColor = laserColor + } + + // MARK: - Private + + private func sizeForFramePart(at index: Int) -> CGRect { + let cornerHeight: CGFloat = 20.0 + let cornerWidth: CGFloat = 2.0 + + switch index { + case 0: + return .init(x: 0, y: 0, width: cornerWidth, height: cornerHeight) + case 1: + return .init(x: 0, y: 0, width: cornerHeight, height: cornerWidth) + case 2: + return .init(x: bounds.width - cornerHeight, y: 0, width: cornerHeight, height: cornerWidth) + case 3: + return .init(x: bounds.width - cornerWidth, y: 0, width: cornerWidth, height: cornerHeight) + case 4: + return .init(x: bounds.width - cornerWidth, + y: bounds.height - 
cornerHeight, + width: cornerWidth, + height: cornerHeight) + case 5: + return .init(x: bounds.width - cornerHeight, y: bounds.height - cornerWidth, width: cornerHeight, height: cornerWidth) + case 6: + return .init(x: 0, y: bounds.height - cornerWidth, width: cornerHeight, height: cornerWidth) + case 7: + return .init(x: 0, y: bounds.height - cornerHeight, width: cornerWidth, height: cornerHeight) + default: + fatalError("unknown index") + } + } +} diff --git a/ios/ReactNativeCameraKit/ScannerInterfaceView.swift b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift new file mode 100644 index 0000000000..dd6a6c2b54 --- /dev/null +++ b/ios/ReactNativeCameraKit/ScannerInterfaceView.swift @@ -0,0 +1,85 @@ +// +// ScannerInterfaceView.swift +// ReactNativeCameraKit +// + +import UIKit + +/* + * Full screen scanner interface + */ +class ScannerInterfaceView: UIView { + private let frameView: ScannerFrameView + private let topOverlayView = UIView() + private let bottomOverlayView = UIView() + private let leftOverlayView = UIView() + private let rightOverlayView = UIView() + + // MARK: - Constants + + private let frameOffset: CGFloat = 30 + private let frameHeight: CGFloat = 200 + private let overlayColor: UIColor = .black.withAlphaComponent(0.4) + + // MARK: - Lifecycle + + init(frameColor: UIColor, laserColor: UIColor) { + frameView = ScannerFrameView(frameColor: frameColor, laserColor: laserColor) + + super.init(frame: .zero) + + addSubview(frameView) + + frameView.startAnimatingScanner() + + [topOverlayView, bottomOverlayView, leftOverlayView, rightOverlayView].forEach { + $0.backgroundColor = overlayColor + addSubview($0) + } + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func draw(_ rect: CGRect) { + super.draw(rect) + + frameView.frame = CGRect(x: 0, y: 0, width: bounds.size.width - 2 * frameOffset, height: frameHeight) + frameView.center = center + + updateOverlaySize(frameView.frame) + } + + // MARK: - Public + + var frameSize: CGRect { + return frameView.frame + } + + func startAnimatingScanner() { + frameView.startAnimatingScanner() + } + + func stopAnimatingScanner() { + frameView.stopAnimatingScanner() + } + + func update(frameColor: UIColor) { + frameView.update(frameColor: frameColor) + } + + func update(laserColor: UIColor) { + frameView.update(laserColor: laserColor) + } + + // MARK: - Private + + private func updateOverlaySize(_ frameRect: CGRect) { + topOverlayView.frame = CGRect(x: 0, y: 0, width: frame.size.width, height: frameRect.origin.y) + leftOverlayView.frame = CGRect(x: 0, y: frameRect.origin.y, width: frameOffset, height: frameHeight) + rightOverlayView.frame = CGRect(x: frameRect.size.width + frameOffset, y: frameRect.origin.y, width: frameOffset, height: frameHeight) + bottomOverlayView.frame = CGRect(x: 0, y: frameRect.origin.y + frameHeight, width: frame.size.width, height: frame.size.height - frameRect.origin.y - frameHeight) + } +} diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift new file mode 100644 index 0000000000..0f118ccf45 --- /dev/null +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -0,0 +1,121 @@ +// +// SimulatorCamera.swift +// ReactNativeCameraKit +// + +import AVFoundation +import UIKit + +/* + * Fake camera implementation to be used on simulator + */ +class SimulatorCamera: CameraProtocol { + private var onOrientationChange: RCTDirectEventBlock? 
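+
+ // There is no capture hardware on the simulator, so state changes are echoed onto
+ // labels in the mock preview instead of being applied to an AVCaptureSession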
+ + var previewView: UIView { mockPreview } + + private var fakeFocusFinishedTimer: Timer? + + // Create mock camera layer. When a photo is taken, we capture this layer and save it in place of a hardware input. + private let mockPreview = SimulatorPreviewView(frame: .zero) + + // MARK: - Public + + func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) { + DispatchQueue.main.async { + self.mockPreview.cameraTypeLabel.text = "Camera type: \(cameraType)" + } + + // Listen to orientation changes + UIDevice.current.beginGeneratingDeviceOrientationNotifications() + NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, + object: UIDevice.current, + queue: nil, + using: { [weak self] notification in self?.orientationChanged(notification: notification) }) + + + } + + private func orientationChanged(notification: Notification) { + guard let device = notification.object as? UIDevice, + let orientation = Orientation(from: device.orientation) else { + return + } + + self.onOrientationChange?(["orientation": orientation.rawValue]) + } + + func cameraRemovedFromSuperview() { + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: UIDevice.current) + + } + + func update(onOrientationChange: RCTDirectEventBlock?) { + self.onOrientationChange = onOrientationChange + } + + func update(pinchScale: CGFloat) { + DispatchQueue.main.async { + self.mockPreview.zoomVelocityLabel.text = "Zoom Scale: \(pinchScale)" + } + } + + func focus(at: CGPoint, focusBehavior: FocusBehavior) { + DispatchQueue.main.async { + self.mockPreview.focusAtLabel.text = "Focus at: (\(Int(at.x)), \(Int(at.y))), focusMode: \(focusBehavior.avFocusMode)" + } + + // Fake focus finish after a second + fakeFocusFinishedTimer?.invalidate() + if case let .customFocus(_, _, focusFinished) = focusBehavior { + fakeFocusFinishedTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: false) { _ in + focusFinished() + } + } + } + + func update(torchMode: TorchMode) { + DispatchQueue.main.async { + self.mockPreview.torchModeLabel.text = "Torch mode: \(torchMode)" + } + } + + func update(flashMode: FlashMode) { + DispatchQueue.main.async { + self.mockPreview.flashModeLabel.text = "Flash mode: \(flashMode)" + } + } + + func update(cameraType: CameraType) { + DispatchQueue.main.async { + self.mockPreview.cameraTypeLabel.text = "Camera type: \(cameraType)" + + self.mockPreview.randomize() + } + } + + func isBarcodeScannerEnabled(_ isEnabled: Bool, + supportedBarcodeType: [AVMetadataObject.ObjectType], + onBarcodeRead: ((_ barcode: String) -> Void)?) {} + func update(scannerFrameSize: CGRect?) {} + + func capturePicture(onWillCapture: @escaping () -> Void, + onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?) 
-> (), + onError: @escaping (_ message: String) -> ()) { + onWillCapture() + + DispatchQueue.main.async { + // Generate snapshot from main UI thread + let previewSnapshot = self.mockPreview.snapshot(withTimestamp: true) + + // Then switch to background thread + DispatchQueue.global(qos: .default).async { + if let imageData = previewSnapshot?.jpegData(compressionQuality: 0.85) { + onSuccess(imageData, nil) + } else { + onError("Failed to convert snapshot to JPEG data") + } + } + } + } +} diff --git a/ios/ReactNativeCameraKit/SimulatorPreviewView.swift b/ios/ReactNativeCameraKit/SimulatorPreviewView.swift new file mode 100644 index 0000000000..2da8d3a215 --- /dev/null +++ b/ios/ReactNativeCameraKit/SimulatorPreviewView.swift @@ -0,0 +1,153 @@ +// +// SimulatorPreviewView.swift +// ReactNativeCameraKit +// + +import UIKit + +class SimulatorPreviewView: UIView { + let zoomVelocityLabel = UILabel() + let focusAtLabel = UILabel() + let torchModeLabel = UILabel() + let flashModeLabel = UILabel() + let cameraTypeLabel = UILabel() + + var balloonLayer = CALayer() + + // MARK: - Lifecycle + + override init(frame: CGRect) { + super.init(frame: frame) + + layer.masksToBounds = true + + layer.insertSublayer(balloonLayer, at: 0) + + let stackView = UIStackView() + stackView.axis = .vertical + addSubview(stackView) + + stackView.translatesAutoresizingMaskIntoConstraints = false + stackView.topAnchor.constraint(equalTo: safeAreaLayoutGuide.topAnchor).isActive = true + stackView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 10).isActive = true + [zoomVelocityLabel, focusAtLabel, torchModeLabel, flashModeLabel, cameraTypeLabel].forEach { + $0.numberOfLines = 0 + stackView.addArrangedSubview($0) + } + } + + @available(*, unavailable) + required init?(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func draw(_ rect: CGRect) { + super.draw(rect) + + randomize() + } + + // MARK: - Public + + func snapshot(withTimestamp showTimestamp: Bool) -> UIImage? { + UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0) + drawHierarchy(in: bounds, afterScreenUpdates: false) + var image = UIGraphicsGetImageFromCurrentImageContext() + + if showTimestamp { + let date = Date() + let dateFormatter = DateFormatter() + dateFormatter.dateFormat = "HH:mm:ss" + let stringFromDate = dateFormatter.string(from: date) + let font = UIFont.boldSystemFont(ofSize: 20) + + image?.draw(in: CGRect(x: 0, y: 0, width: image?.size.width ?? 0, height: image?.size.height ?? 0)) + let rect = CGRect(x: 25, y: 125, width: image?.size.width ?? 0, height: image?.size.height ?? 
0) + UIColor.white.set() + let textFontAttributes = [NSAttributedString.Key.font: font] + stringFromDate.draw(in: rect.integral, withAttributes: textFontAttributes) + + image = UIGraphicsGetImageFromCurrentImageContext() + } + + UIGraphicsEndImageContext() + return image + } + + func randomize() { + layer.backgroundColor = UIColor(hue: CGFloat(Double.random(in: 0...1)), saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor + balloonLayer.removeFromSuperlayer() + balloonLayer = CALayer() + layer.insertSublayer(balloonLayer, at: 0) + + for _ in 0..<5 { + drawBalloon() + } + } + + // MARK: - Private + + private func drawBalloon() { + let stringLength = CGFloat(200) + let radius = CGFloat(Int.random(in: 50...150)) + + let x = CGFloat(Int.random(in: 0...Int(frame.size.width))) + let y = CGFloat(Int.random(in: 0...Int(frame.size.height + radius + stringLength))) + let stretch = radius / 3 + + let balloon = CALayer() + balloon.frame = CGRect(x: x - radius, y: y - radius, width: radius * 2, height: radius * 2 + stringLength) + + // Balloon main circle + let circle = CAShapeLayer() + let colorHue = Double.random(in: 0...1) + + circle.path = UIBezierPath(ovalIn: CGRect(x: 0, y: 0, width: radius * 2, height: radius * 2 + stretch)).cgPath + circle.fillColor = UIColor(hue: colorHue, saturation: 1.0, brightness: 0.95, alpha: 1.0).cgColor + + // Balloon reflection + let reflection = CAShapeLayer() + reflection.path = UIBezierPath(ovalIn: CGRect(x: radius / 2, y: radius / 2, width: radius * 0.7, height: radius * 0.7)).cgPath + reflection.fillColor = UIColor(hue: colorHue, saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor + + // Balloon string + let line = CAShapeLayer() + let linePath = UIBezierPath() + let startPoint = CGPoint(x: balloon.frame.size.width / 2, y: radius * 2) + let endPoint = CGPoint(x: balloon.frame.size.width, y: (radius * 2) + stringLength) + linePath.move(to: startPoint) + linePath.addQuadCurve(to: endPoint, controlPoint: CGPoint(x: balloon.frame.size.width / 2, y: radius * 2 + stringLength / 2)) + line.path = linePath.cgPath + line.fillColor = nil + line.strokeColor = UIColor.darkGray.cgColor + line.opacity = 1.0 + line.lineWidth = radius * 0.05 + + // Add layers + balloon.addSublayer(line) + circle.addSublayer(reflection) + balloon.addSublayer(circle) + + balloonLayer.addSublayer(balloon) + + // Apply animation + let scale = CABasicAnimation(keyPath: "transform.scale") + scale.fromValue = NSNumber(value: 0.7) + scale.toValue = NSNumber(value: 1.0) + scale.duration = 10.0 + scale.fillMode = .forwards + scale.isRemovedOnCompletion = false + scale.autoreverses = true + scale.repeatCount = .greatestFiniteMagnitude + + let move = CABasicAnimation(keyPath: "position.y") + move.fromValue = NSNumber(value: balloon.frame.origin.y) + move.toValue = NSNumber(value: 0 - balloon.frame.size.height) + move.duration = Double.random(in: 30...100) + move.isRemovedOnCompletion = false + move.repeatCount = .greatestFiniteMagnitude + + balloon.add(scale, forKey: "scale") + balloon.add(move, forKey: "move") + } +} diff --git a/ios/ReactNativeCameraKit/Types.swift b/ios/ReactNativeCameraKit/Types.swift new file mode 100644 index 0000000000..331ebbcb87 --- /dev/null +++ b/ios/ReactNativeCameraKit/Types.swift @@ -0,0 +1,161 @@ +// +// Types.swift +// ReactNativeCameraKit +// + +import AVFoundation +import Foundation + +// Dummy class used for RCTConvert +@objc(CKType) class Types: NSObject {} + +@objc(CKCameraType) +public enum CameraType: Int, CustomStringConvertible { + case back + case front + + var 
avPosition: AVCaptureDevice.Position { + switch self { + case .back: return .back + case .front: return .front + } + } + + public var description: String { + switch self { + case .back: return "back" + case .front: return "front" + } + } +} + +@objc(CKFlashMode) +public enum FlashMode: Int, CustomStringConvertible { + case on + case off + case auto + + var avFlashMode: AVCaptureDevice.FlashMode { + switch self { + case .on: return .on + case .off: return .off + case .auto: return .auto + } + } + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + case .auto: return "auto" + } + } +} + +@objc(CKTorchMode) +public enum TorchMode: Int, CustomStringConvertible { + case on + case off + + init(from string: String) { + switch string { + case "on": self = .on + default: self = .off + } + } + + var avTorchMode: AVCaptureDevice.TorchMode { + switch self { + case .on: return .on + case .off: return .off + } + } + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + } + } +} + +@objc(CKFocusMode) +public enum FocusMode: Int, CustomStringConvertible { + case on + case off + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + } + } +} + +@objc(CKZoomMode) +public enum ZoomMode: Int, CustomStringConvertible { + case on + case off + + public var description: String { + switch self { + case .on: return "on" + case .off: return "off" + } + } +} + +@objc(CKSetupResult) +enum SetupResult: Int { + case notStarted + case success + case cameraNotAuthorized + case sessionConfigurationFailed +} + +enum Orientation: Int { + case portrait = 0 // ⬆️ + case landscapeLeft = 1 // ⬅️ + case portraitUpsideDown = 2 // ⬇️ + case landscapeRight = 3 // ➡️ + + init?(from orientation: UIDeviceOrientation) { + switch orientation { + case .portrait: self = .portrait + case .landscapeLeft: self = .landscapeLeft + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeRight: self = .landscapeRight + default: return nil + } + } + + init?(from orientation: UIInterfaceOrientation) { + switch orientation { + case .portrait: self = .portrait + case .landscapeLeft: self = .landscapeLeft + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeRight: self = .landscapeRight + default: return nil + } + } + + var videoOrientation: AVCaptureVideoOrientation { + switch self { + case .portrait: return .portrait + case .landscapeLeft: return .landscapeLeft + case .portraitUpsideDown: return .portraitUpsideDown + case .landscapeRight: return .landscapeRight + } + } +} + +extension AVCaptureDevice.FocusMode: CustomStringConvertible { + public var description: String { + switch self { + case .autoFocus: return "autofocus" + case .continuousAutoFocus: return "continuousAutoFocus" + case .locked: return "locked" + @unknown default: return "unknown" + } + } +} diff --git a/src/Camera.android.tsx b/src/Camera.android.tsx index a69e58bcf2..07bb2b984a 100644 --- a/src/Camera.android.tsx +++ b/src/Camera.android.tsx @@ -16,9 +16,6 @@ const Camera = React.forwardRef((props: CameraProps, ref) => { // we must use the general module and tell it what View it's supposed to be using return await RNCameraKitModule.capture(options, findNodeHandle(nativeRef.current ?? null)); }, - setTorchMode: (mode = 'off') => { - RNCameraKitModule.setTorchMode(mode, findNodeHandle(nativeRef.current ?? 
null)); - }, requestDeviceCameraAuthorization: () => { throw new Error('Not implemented'); }, diff --git a/src/Camera.d.ts b/src/Camera.d.ts index 0a3d3a81e4..8464b4002b 100644 --- a/src/Camera.d.ts +++ b/src/Camera.d.ts @@ -34,6 +34,7 @@ export interface CameraProps { ratioOverlayColor?: number | string; resetFocusTimeout?: number; resetFocusWhenMotionDetected?: boolean; + scanThrottleDelay: number; } declare const Camera: React.FC; diff --git a/src/Camera.ios.tsx b/src/Camera.ios.tsx index 3762e76d67..c821db51f2 100644 --- a/src/Camera.ios.tsx +++ b/src/Camera.ios.tsx @@ -1,6 +1,5 @@ -import _cloneDeep from 'lodash/cloneDeep'; import React from 'react'; -import { requireNativeComponent, NativeModules, processColor } from 'react-native'; +import { requireNativeComponent, NativeModules } from 'react-native'; import { CameraApi } from './types'; import { CameraProps } from './Camera'; @@ -14,9 +13,6 @@ const Camera = React.forwardRef((props: CameraProps, ref: any) => { capture: async () => { return await CKCameraManager.capture({}); }, - setTorchMode: (mode = 'off') => { - CKCameraManager.setTorchMode(mode); - }, requestDeviceCameraAuthorization: async () => { return await CKCameraManager.checkDeviceCameraAuthorizationStatus(); }, @@ -25,10 +21,7 @@ const Camera = React.forwardRef((props: CameraProps, ref: any) => { }, })); - const transformedProps: CameraProps = _cloneDeep(props); - transformedProps.ratioOverlayColor = processColor(props.ratioOverlayColor); - - return ; + return ; }); Camera.defaultProps = { diff --git a/src/CameraScreen.tsx b/src/CameraScreen.tsx deleted file mode 100644 index e69f8ee82b..0000000000 --- a/src/CameraScreen.tsx +++ /dev/null @@ -1,443 +0,0 @@ -import PropTypes from 'prop-types'; -import React, { Component } from 'react'; -import { - StyleSheet, - Text, - View, - TouchableOpacity, - Image, - Dimensions, - Platform, - SafeAreaView, - ImageStyle, - ImageSourcePropType, -} from 'react-native'; -import _ from 'lodash'; -import Camera, { CameraProps } from './Camera'; -import { CameraApi, CameraType, CaptureData, FlashMode } from './types'; - -const { width, height } = Dimensions.get('window'); - -type Actions = { - leftButtonText?: string; - leftCaptureRetakeButtonText?: string; -}; - -type CameraRatioOverlay = { - ratios: string[]; -}; - -type FlashImages = { - on: ImageSourcePropType; - off: ImageSourcePropType; - auto: ImageSourcePropType; -}; - -type BottomButtonTypes = 'left' | 'capture'; - -type BottomPressedData = { - type: BottomButtonTypes; - captureImages: CaptureData[]; - captureRetakeMode: boolean; - image?: CaptureData; -}; - -type CameraScreenProps = CameraProps & { - // Controls - actions?: Actions; - flashImages?: FlashImages; - flashImageStyle?: ImageStyle; - torchOnImage?: ImageSourcePropType; - torchOffImage?: ImageSourcePropType; - torchImageStyle?: ImageStyle; - captureButtonImage?: ImageSourcePropType; - captureButtonImageStyle?: ImageStyle; - cameraFlipImage?: ImageSourcePropType; - cameraFlipImageStyle?: ImageStyle; - hideControls?: boolean; - onBottomButtonPressed?: (event: BottomPressedData) => void; - // Overlay - cameraRatioOverlay?: CameraRatioOverlay; - showCapturedImageCount?: boolean; - // Behavior - allowCaptureRetake?: boolean; -}; - -type FlashData = { - mode: FlashMode; - image?: ImageSourcePropType; -}; - -type State = { - captureImages: CaptureData[]; - flashData?: FlashData; - torchMode: boolean; - ratios: string[]; - ratioArrayPosition: number; - imageCaptured?: CaptureData; - captured: boolean; - cameraType: CameraType; 
-}; - -export default class CameraScreen extends Component { - static propTypes = { - allowCaptureRetake: PropTypes.bool, - }; - - static defaultProps = { - allowCaptureRetake: false, - }; - - currentFlashArrayPosition: number; - flashArray: FlashData[]; - camera: CameraApi; - - constructor(props: CameraScreenProps) { - super(props); - this.flashArray = [ - { - mode: 'auto', - image: props.flashImages?.auto, - }, - { - mode: 'on', - image: props.flashImages?.on, - }, - { - mode: 'off', - image: props.flashImages?.off, - }, - ]; - - this.currentFlashArrayPosition = this.props.flashMode - ? this.flashArray.findIndex((flashData) => flashData.mode === this.props.flashMode) - : 0; - - this.state = { - captureImages: [], - flashData: this.flashArray[this.currentFlashArrayPosition], - torchMode: this.props.torchMode === 'on' || false, - ratios: [], - ratioArrayPosition: -1, - imageCaptured: undefined, - captured: false, - cameraType: this.props.cameraType || CameraType.Back, - }; - } - - componentDidMount() { - let ratios: string[] = []; - if (this.props.cameraRatioOverlay) { - ratios = this.props.cameraRatioOverlay.ratios || []; - } - // eslint-disable-next-line react/no-did-mount-set-state - this.setState({ - ratios: ratios, - ratioArrayPosition: ratios.length > 0 ? 0 : -1, - }); - } - - isCaptureRetakeMode() { - return !!(this.props.allowCaptureRetake && !_.isUndefined(this.state.imageCaptured)); - } - - renderFlashButton() { - return ( - this.state.flashData?.image && - !this.isCaptureRetakeMode() && ( - this.onSetFlash()}> - - - ) - ); - } - - renderTorchButton() { - return ( - this.props.torchOnImage && - this.props.torchOffImage && - !this.isCaptureRetakeMode() && ( - this.onSetTorch()}> - - - ) - ); - } - - renderSwitchCameraButton() { - return ( - this.props.cameraFlipImage && - !this.isCaptureRetakeMode() && ( - this.onSwitchCameraPressed()}> - - - ) - ); - } - - renderTopButtons() { - return ( - !this.props.hideControls && ( - - {this.renderFlashButton()} - {this.renderSwitchCameraButton()} - {this.renderTorchButton()} - - ) - ); - } - - renderCamera() { - return ( - - {this.isCaptureRetakeMode() && this.state.imageCaptured ? ( - - ) : ( - (this.camera = cam)} - style={{ flex: 1, justifyContent: 'flex-end' }} - cameraType={this.state.cameraType} - flashMode={this.state.flashData?.mode} - torchMode={this.state.torchMode ? 
'on' : 'off'} - ratioOverlay={this.state.ratios[this.state.ratioArrayPosition]} - /> - )} - - ); - } - - numberOfImagesTaken() { - const numberTook = this.state.captureImages.length; - if (numberTook >= 2) { - return numberTook; - } else if (this.state.captured) { - return '1'; - } else { - return ''; - } - } - - renderCaptureButton() { - return ( - this.props.captureButtonImage && - !this.isCaptureRetakeMode() && ( - - this.onCaptureImagePressed()}> - - {this.props.showCapturedImageCount && ( - - {this.numberOfImagesTaken()} - - )} - - - ) - ); - } - - renderRatioStrip() { - if (this.state.ratios.length === 0 || this.props.hideControls) { - return null; - } - return ( - - - Your images look best at a {this.state.ratios[0] || ''} ratio - this.onRatioButtonPressed()} - > - {this.props.ratioOverlay} - - - - ); - } - - sendBottomButtonPressedAction(type: BottomButtonTypes, captureRetakeMode: boolean, image?: CaptureData) { - if (this.props.onBottomButtonPressed) { - this.props.onBottomButtonPressed({ type, captureImages: this.state.captureImages, captureRetakeMode, image }); - } - } - - onBottomButtonPressed(type: BottomButtonTypes) { - const captureRetakeMode = this.isCaptureRetakeMode(); - if (captureRetakeMode) { - if (type === 'left') { - this.setState({ imageCaptured: undefined }); - } - } else { - this.sendBottomButtonPressedAction(type, captureRetakeMode, undefined); - } - } - - renderBottomButton(type: 'left') { - const showButton = true; - if (showButton) { - const buttonNameSuffix = this.isCaptureRetakeMode() ? 'CaptureRetakeButtonText' : 'ButtonText'; - const buttonText = _(this.props).get(`actions.${type}${buttonNameSuffix}`); - return ( - this.onBottomButtonPressed(type)} - > - {buttonText} - - ); - } else { - return ; - } - } - - renderBottomButtons() { - return ( - !this.props.hideControls && ( - - {this.renderBottomButton('left')} - {this.renderCaptureButton()} - - ) - ); - } - - onSwitchCameraPressed() { - const direction = this.state.cameraType === CameraType.Back ? 
CameraType.Front : CameraType.Back; - this.setState({ cameraType: direction }); - } - - onSetFlash() { - this.currentFlashArrayPosition = (this.currentFlashArrayPosition + 1) % 3; - const newFlashData = this.flashArray[this.currentFlashArrayPosition]; - this.setState({ flashData: newFlashData }); - } - - onSetTorch() { - this.setState({ torchMode: !this.state.torchMode }); - } - - async onCaptureImagePressed() { - const image = await this.camera.capture(); - - if (this.props.allowCaptureRetake) { - this.setState({ imageCaptured: image }); - } else { - if (image) { - this.setState({ - captured: true, - imageCaptured: image, - captureImages: _.concat(this.state.captureImages, image), - }); - } - this.sendBottomButtonPressedAction('capture', false, image); - } - } - - onRatioButtonPressed() { - const newRatiosArrayPosition = (this.state.ratioArrayPosition + 1) % this.state.ratios.length; - this.setState({ ratioArrayPosition: newRatiosArrayPosition }); - } - - render() { - return ( - - {Platform.OS === 'android' && this.renderCamera()} - {this.renderTopButtons()} - {Platform.OS !== 'android' && this.renderCamera()} - {this.renderRatioStrip()} - {Platform.OS === 'android' && } - {this.renderBottomButtons()} - - ); - } -} - -const styles = StyleSheet.create({ - bottomButtons: { - flex: 2, - flexDirection: 'row', - justifyContent: 'space-between', - padding: 14, - }, - textStyle: { - color: 'white', - fontSize: 20, - }, - ratioBestText: { - color: 'white', - fontSize: 18, - }, - ratioText: { - color: '#ffc233', - fontSize: 18, - }, - topButtons: { - flex: 1, - flexDirection: 'row', - justifyContent: 'space-between', - paddingTop: 8, - paddingBottom: 0, - }, - cameraContainer: { - ...Platform.select({ - android: { - position: 'absolute', - top: 0, - left: 0, - width, - height, - }, - default: { - flex: 10, - flexDirection: 'column', - }, - }), - }, - captureButtonContainer: { - flex: 1, - justifyContent: 'center', - alignItems: 'center', - }, - textNumberContainer: { - position: 'absolute', - top: 0, - left: 0, - bottom: 0, - right: 0, - justifyContent: 'center', - alignItems: 'center', - }, - bottomButton: { - flex: 1, - flexDirection: 'row', - alignItems: 'center', - padding: 10, - }, - bottomContainerGap: { - flex: 1, - flexDirection: 'row', - justifyContent: 'flex-end', - alignItems: 'center', - padding: 10, - }, - gap: { - flex: 10, - flexDirection: 'column', - }, -}); diff --git a/src/index.ts b/src/index.ts index 028369f1d3..1586cb1f22 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,7 +1,6 @@ import { NativeModules } from 'react-native'; import Camera from './Camera'; -import CameraScreen from './CameraScreen'; import type { CameraApi, CameraType, CaptureData, FlashMode, FocusMode, TorchMode, ZoomMode } from './types'; const { CameraKit } = NativeModules; @@ -16,4 +15,4 @@ export const Orientation = { export default CameraKit; -export { Camera, CameraScreen, CameraType, TorchMode, FlashMode, FocusMode, ZoomMode, CameraApi, CaptureData }; +export { Camera, CameraType, TorchMode, FlashMode, FocusMode, ZoomMode, CameraApi, CaptureData }; diff --git a/src/types.ts b/src/types.ts index 24d6c8dad7..15d70b3792 100644 --- a/src/types.ts +++ b/src/types.ts @@ -25,7 +25,6 @@ export type CaptureData = { export type CameraApi = { capture: () => Promise; - setTorchMode: (mode: TorchMode) => void; requestDeviceCameraAuthorization: () => Promise; checkDeviceCameraAuthorizationStatus: () => Promise; };