├── .eslintrc.json
├── .gitignore
├── CONTRIBUTING.md
├── LICENSE.md
├── README.md
├── RNRectangleScanner.podspec
├── android
├── .settings
│ └── org.eclipse.buildship.core.prefs
├── build.gradle
├── gradle.properties
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── src
│ └── main
│ ├── AndroidManifest.xml
│ ├── java
│ └── com
│ │ └── rectanglescanner
│ │ ├── RNRectangleScannerManager.java
│ │ ├── RNRectangleScannerModule.java
│ │ ├── RectangleScannerPackage.java
│ │ ├── helpers
│ │ ├── CapturedImage.java
│ │ ├── ImageProcessor.java
│ │ ├── ImageProcessorMessage.java
│ │ └── Quadrilateral.java
│ │ └── views
│ │ ├── CameraDeviceController.java
│ │ ├── MainView.java
│ │ ├── RNRectangleScannerView.java
│ │ └── RectangleDetectionController.java
│ └── res
│ └── layout
│ └── activity_rectangle_scanner.xml
├── example
├── .gitignore
├── App.js
├── app.json
├── assets
│ ├── adaptive-icon.png
│ ├── favicon.png
│ ├── icon.png
│ └── splash.png
├── babel.config.js
├── package.json
├── src
│ ├── ScanDocument
│ │ ├── CameraControls.js
│ │ ├── DocumentScanner.js
│ │ ├── index.js
│ │ └── styles.js
│ └── useIsMultiTasking.js
└── yarn.lock
├── images
├── black-and-white.jpeg
├── color.jpeg
├── demo.gif
├── greyscale.jpeg
└── photo.jpeg
├── index.js
├── ios
├── CameraDeviceController.h
├── CameraDeviceController.m
├── RNRectangleScanner.xcodeproj
│ ├── project.pbxproj
│ └── xcshareddata
│ │ └── xcschemes
│ │ └── RNRectangleScanner.xcscheme
├── RNRectangleScannerManager.h
├── RNRectangleScannerManager.m
├── RNRectangleScannerView.h
├── RNRectangleScannerView.m
├── RectangleDetectionController.h
└── RectangleDetectionController.m
├── package.json
├── react-native.config.js
├── src
├── Filters.js
├── FlashAnimation.js
├── RectangleOverlay.js
├── Scanner.js
└── index.d.ts
└── yarn.lock
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "jest": true
4 | },
5 | "extends": "airbnb",
6 | "parser": "babel-eslint",
7 | "rules": {
8 | "react/no-unescaped-entities": 0,
9 | "react/jsx-filename-extension": [1, { "extensions": [".js", ".jsx"] }],
10 | "function-paren-newline": ["error", "consistent"],
11 | "object-curly-newline": ["error", { "consistent": true }],
12 | "react/destructuring-assignment": 0,
13 | "jsx-a11y/accessible-emoji": 0
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # OSX
2 | #
3 | .DS_Store
4 |
5 | # XDE
6 | .expo/
7 |
8 | # VSCode
9 | .vscode/
10 | jsconfig.json
11 |
12 | # Xcode
13 | #
14 | build/
15 | *.pbxuser
16 | !default.pbxuser
17 | *.mode1v3
18 | !default.mode1v3
19 | *.mode2v3
20 | !default.mode2v3
21 | *.perspectivev3
22 | !default.perspectivev3
23 | xcuserdata
24 | *.xccheckout
25 | *.moved-aside
26 | DerivedData
27 | *.hmap
28 | *.ipa
29 | *.xcuserstate
30 | project.xcworkspace
31 |
32 | # Android/IntelliJ
33 | #
34 | build/
35 | .idea
36 | .gradle
37 | local.properties
38 | *.iml
39 |
40 | # node.js
41 | #
42 | node_modules/
43 | npm-debug.log
44 | yarn-debug.log
45 | yarn-error.log
46 |
47 | # BUCK
48 | buck-out/
49 | \.buckd/
50 | android/app/libs
51 | android/keystores/debug.keystore
52 |
53 | # generated by bob
54 | lib/
55 |
56 | # example expo app (ignore ios and android folders)
57 | example/ios/
58 | example/android/
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ### Issues
4 | When opening an issue, try to be specific. For example, if you are opening an issue relating to the build process in android, it is helpful to include a stack trace and the gradle version you are using.
5 |
6 | I usually will reply to an issue within the first 24hrs or so asking for more information or providing help. If the issue requires a code fix, this will take longer.
7 |
8 | ### Pull Requests
9 | I'm always looking for additional help and welcome PRs! One thing to note, I am a big fan of understanding why code is being added or removed. So if you open a PR, please reference a link to why that change is being done (ex: Apple's docs say to do this... + link). This helps get the code merged in faster (otherwise, I will search the web and docs for the reason behind the PR) and I think it helps other open-source programmers too.
10 |
11 | ### Design of Code
12 | This package is built for react developers. This means that the native code should not restrict the javascript functionality and instead supply a robust API. For example, instead of implementing a "Focus on Point" feature in iOS and Android, we instead supply the javascript with an api to focus the camera. The javascript developer can then implement their own algorithm for camera focusing if they wish. **When requesting a feature or creating a PR, you should take this into account**
13 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) 2020 GitHub Inc.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # `react-native-rectangle-scanner`
2 |
3 | ### ⚠️ Deprecation Notice ⚠️
4 | iOS and Android have come a long way since this package was first released. Both iOS Vision and Android Google Play Services allow you to use their built-in document scanner; both are much, much better than the capabilities of this package (including editing the detected boundaries).
5 |
6 |
7 | https://github.com/WebsiteBeaver/react-native-document-scanner-plugin is an NPM package that we switched to which supports those above mentioned native APIs and is also working well in our Expo app.
8 |
9 | I did release one last version for this package which corrected a few minor things allowing this to work with Expo 50+ in dev client mode.
10 |
11 |
12 | 
13 |
14 | [](https://www.npmjs.com/package/react-native-rectangle-scanner)  
15 |
16 | Live photo rectangle detection library useful for scanning documents. On capture, it returns the URIs for the original and a cropped version of the image allowing you to use the images as you want. You can additionally apply filters to adjust the visibility of text on the image (similar to the iOS document scanner filters).
17 |
18 | - Live detection
19 | - Perspective correction and crop of the image
20 | - Filters
21 | - Flash
22 | - Orientation Changes
23 | - Camera permission and capabilities detection
24 | - Fully customizable UI
25 |
26 | ## Getting started
27 |
28 | Install the library using either yarn:
29 |
30 | ```sh
31 | yarn add react-native-rectangle-scanner
32 | ```
33 |
34 | or npm:
35 |
36 | ```sh
37 | npm install react-native-rectangle-scanner --save
38 | ```
39 |
40 | you will also need to install `react-native-svg`, which is used for drawing the detected rectangle over the camera view.
41 |
42 | ### iOS Only
43 |
44 | CocoaPods on iOS needs this extra step:
45 |
46 | ```sh
47 | cd ios && pod install && cd ..
48 | ```
49 |
50 |
51 | **NOTE**: you need to be targeting iOS 10 or greater. Your pod file may need `platform :ios, '10.0'` at the top
52 |
53 | #### Info.plist
54 | Add Camera permissions request:
55 |
56 | Add the `NSCameraUsageDescription` tag, otherwise you will only see a black screen and no camera. iOS needs to know why you want to use the camera.
57 |
58 | ### Android Only
59 |
60 | If you do not have it already in your project, you must link openCV in your `settings.gradle` file
61 |
62 | ```java
63 | include ':openCVLibrary310'
64 | project(':openCVLibrary310').projectDir = new File(rootProject.projectDir,'../node_modules/react-native-rectangle-scanner/android/openCVLibrary310')
65 | ```
66 |
67 | #### In android/app/src/main/AndroidManifest.xml
68 |
69 | Add Camera permissions request:
70 |
71 | ```
72 |
73 | ```
74 |
75 | ## Usage
76 |
77 | This is the most barebones usage of this. It will show a fullscreen camera preview with no controls on it. Calling `this.camera.current.capture()` will trigger a capture and after the image has been captured and processed (cropped, filtered, stored/cached), it will trigger the `onPictureProcessed` callback.
78 |
79 |
80 | ```javascript
81 | import React, { Component, useRef } from "react"
82 | import { View, Image } from "react-native"
83 |
84 | import Scanner from "react-native-rectangle-scanner"
85 |
86 | class DocumentScanner extends Component {
87 |
88 | handleOnPictureProcessed = ({croppedImage, initialImage}) => {
89 | this.props.doSomethingWithCroppedImagePath(croppedImage);
90 | this.props.doSomethingWithOriginalImagePath(initialImage);
91 | }
92 |
93 | onCapture = () => {
94 | this.camera.current.capture();
95 | }
96 |
97 | render() {
98 | return (
99 |
104 | );
105 | }
106 | }
107 | ```
108 |
109 | Above is a very barebones version of the scanner. Check out a full example in [example folder](example/CompleteExample.js). That will handle device specific things, rendering error states, camera controls for different device sizes, multitasking mode, etc. This is what I would consider the production ready version of using this package (it's actually very similar to the component(s) that we use in production).
110 |
111 | ## Simulators
112 |
113 | This package works on a simulator. Android has a pretty cool VR world that emulates a camera. On iOS the preview will just be a black screen, and the `onDeviceSetup` property will return false for the `hasCamera` attribute so you can show a custom message like "This device doesn't have a camera".
114 |
115 | ## Properties
116 |
117 | | Prop | Default | Type | Description |
118 | | :-------------------------- | :-----: | :-------: | :--------------------------------------------------------- |
119 | | filterId | `none` | `integer` | The id of the filter to use. [See More](#filters) |
120 | | enableTorch | `false` | `bool` | If the flashlight should be turned on |
121 | | capturedQuality | `0.5` | `float` | The jpeg quality of the output images |
122 | | onTorchChanged | `null` | `func` | Called when the system changes the flash state |
123 | | onRectangleDetected | `null` | `func` | Called when the system detects a rectangle on the image, sends the coordinates |
124 | | onPictureTaken | `null` | `func` | Called after an image is captured. It hasn't been cached yet but it will send you the URIs of where it will store it |
125 | | onPictureProcessed | `null` | `func` | Called after an image was captured and cached. It sends the URIs of where it stored the images. |
126 | | styles | `null` | `object` | Styles the camera view (works best on fullscreen/flex: 1). |
127 | | onErrorProcessingImage | `null` | `func` | Called if there was an error capturing the image. Includes a `message` and the paths it was trying to save if the error was failing to save the image. |
128 | | onDeviceSetup | `null` | `func` | Called after the system sets up the camera allowing you to configure the view for different device setups. |
129 | | androidPermission | `null` | `object or false` | ANDROID ONLY: Allows specifying the permission object on android or disabling entirely (pass `false`). |
130 |
131 |
132 | ### onDeviceSetup
133 | This callback is really important. When you show the Scanner component, it will start setting up the camera. The `onDeviceSetup({hasCamera, permissionToUseCamera, flashIsAvailable, previewHeightPercent, previewWidthPercent})` contains all the details you need to preset the camera view.
134 |
135 | `hasCamera` will notify you if the device even has a camera. iOS simulators do not have a camera for example. This gives you the chance to hide the camera preview and show an error or something.
136 |
137 | `permissionToUseCamera` will tell you if the user has granted permission to use the camera.
138 |
139 | `flashIsAvailable` tells you if the device has a flashlight that you can use.
140 |
141 | `previewHeightPercent` and `previewWidthPercent` contain percentages of the portrait view that the preview takes up. This is important because on android devices, there are preset preview sizes that may or may not match the screen size. So you can't just show the preview at full screen or the preview will be stretched. See the example on how I handle this.
142 |
143 |
144 | ### Torch
145 | When changing the `enableTorch` property, the system will call the `onTorchChanged({enabled})` callback as well with the new state. This allows you to keep your component state in sync. Natively the torch will get turned off when the component cleans up or after an image is captured. This allows you to update the state.
146 |
147 | ### Rectangle Detection
148 | Rectangle detection does NOT show up on the UI automatically. You must take the coordinates from the `onRectangleDetected({detectedRectangle})` callback and render a view that displays a rectangle over the camera view. This can be done easily with a simple SVG by importing `RectangleOverlay` from this package and feeding it the detected rectangle object.
149 |
150 | Why not just handle it natively? Because it allows much more customization of the rectangle overlay. For example, you could black out the entire image, except where the detected rectangle is. This also lets you control auto capture and UI changes on detection in javascript.
151 |
152 | #### Auto Capture
153 | Auto capturing is handled entirely in the `RectangleOverlay` component by simply setting its `allowDetection={true}` and `onDetectedCapture={this.captureImage}` props. See that component for documentation.
154 |
155 | #### Focusing
156 | iOS and some android devices support `continuous focus` mode on their cameras. This means we don't need to worry about focusing the camera ever. There is a function you can call on the ref `focus()` which will trigger a refocus on android devices. *This will likely get expanded in the future to support points so you can focus on a specific location.*
157 |
158 | ### Capturing An Image
159 | To capture an image, you must create a ref to the component. This ref will allow you to call `capture()` which will trigger the capture asynchronously.
160 |
161 | Once triggered, it will take the current detected rectangle and crop, apply filters, and transform the image to correct the perspective. It will call `onPictureTaken({croppedImage, initialImage})` containing the URIs of the cropped image and the original image. NOTE: The image still needs to be cached which can take a few ms, so loading the image will not work yet.
162 |
163 | The picture will then start to be processed and cached. Once done, it will call `onPictureProcessed({croppedImage, initialImage})` containing the URIs of the images. This is called after the image is cached which means you can load the images into the UI.
164 |
165 | NOTE: There is no UI changes when you capture an image. No screen flash, only a camera sound. This is meant so you can design how you want. *The easiest way is to just use an animated view to flash a white screen.* You can import the `FlashAnimation` component to do this if you want.
166 |
167 | **NOTE**: captured images are stored in the app's cache directory under the `CACHE_FOLDER_NAME`. This allows you to clear the cached images when you are done. (This is advised although these may get deleted by the system.)
168 |
169 | **NOTE**: on iOS, it will try to correct the rotation of the image. If you are in portrait mode, but the phone is rotated to landscape, it will rotate the captured image automatically.
170 |
171 | ### Filters
172 | Instead of allowing you to customize the contrast, saturation, etc of the image, I prebuilt the filters. This is because the filter controls are massively different between platforms and changing those values results in much different image outputs. Below are the available filters. Honestly, the color controls were pretty bad on android, so the best ones for android are probably just using the Color and Black & White instead of showing all 4 (they are only slightly better than Greyscale and the original photo).
173 |
174 | | ID | Name | Default | Description | Preview |
175 | | -- | ------------- | ------- | -------------------------------------- | -------------------------------------------|
176 | | 1 | Color | | Optimized for legibility with color. |  |
177 | | 2 | Black & White | | Optimized for legibility without color |  |
178 | | 3 | Greyscale | | A black & white version of the image |  |
179 | | 4 | Photo | YES | Just the photo |  |
180 |
--------------------------------------------------------------------------------
/RNRectangleScanner.podspec:
--------------------------------------------------------------------------------
1 | require 'json'
2 |
3 | package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
4 |
5 | Pod::Spec.new do |s|
6 | s.name = 'RNRectangleScanner'
7 | s.version = package['version']
8 | s.summary = package['description']
9 | s.description = package['description']
10 | s.license = package['license']
11 | s.author = package['author']
12 | s.homepage = 'https://github.com/HarvestProfit/react-native-rectangle-scanner'
13 | s.source = { git: 'https://github.com/HarvestProfit/react-native-rectangle-scanner.git', tag: s.version }
14 |
15 | s.requires_arc = true
16 | s.platform = :ios, '10.0'
17 |
18 | s.preserve_paths = 'README.md', 'package.json', 'index.js'
19 | s.source_files = 'ios/**/*.{h,m}'
20 |
21 | s.dependency 'React'
22 | end
23 |
--------------------------------------------------------------------------------
/android/.settings/org.eclipse.buildship.core.prefs:
--------------------------------------------------------------------------------
1 | connection.project.dir=
2 | eclipse.preferences.version=1
3 |
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 | repositories {
3 | mavenCentral()
4 | google()
5 | maven {
6 | // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm
7 | url "$rootDir/../node_modules/react-native/android"
8 | }
9 | }
10 | dependencies {
11 | classpath("com.android.tools.build:gradle:7.3.1")
12 | }
13 | }
14 |
15 | apply plugin: 'com.android.library'
16 |
17 | android {
18 | compileSdkVersion 33
19 |
20 | defaultConfig {
21 | minSdkVersion 16
22 | targetSdkVersion 33
23 | versionCode 1
24 | versionName "1.0"
25 | ndk {
26 | abiFilters "armeabi-v7a", "x86"
27 | }
28 | }
29 | }
30 |
31 | repositories {
32 | mavenCentral()
33 | }
34 |
35 | dependencies {
36 | implementation 'org.opencv:opencv:4.9.0'
37 | implementation 'com.facebook.react:react-native:+'
38 | }
--------------------------------------------------------------------------------
/android/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | # Default value: -Xmx10248m -XX:MaxPermSize=256m
13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
14 |
15 | # When configured, Gradle will run in incubating parallel mode.
16 | # This option should only be used with decoupled projects. More details, visit
17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
18 | # org.gradle.parallel=true
19 |
20 | # android.useDeprecatedNdk=true
21 |
--------------------------------------------------------------------------------
/android/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HarvestProfit/react-native-rectangle-scanner/22fe3cfccf081f17b955d333796dd7d2fc50e8d9/android/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/android/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Thu Aug 01 13:05:36 CDT 2024
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
5 | zipStoreBase=GRADLE_USER_HOME
6 | zipStorePath=wrapper/dists
7 |
--------------------------------------------------------------------------------
/android/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # For Cygwin, ensure paths are in UNIX format before anything is touched.
46 | if $cygwin ; then
47 | [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
48 | fi
49 |
50 | # Attempt to set APP_HOME
51 | # Resolve links: $0 may be a link
52 | PRG="$0"
53 | # Need this for relative symlinks.
54 | while [ -h "$PRG" ] ; do
55 | ls=`ls -ld "$PRG"`
56 | link=`expr "$ls" : '.*-> \(.*\)$'`
57 | if expr "$link" : '/.*' > /dev/null; then
58 | PRG="$link"
59 | else
60 | PRG=`dirname "$PRG"`"/$link"
61 | fi
62 | done
63 | SAVED="`pwd`"
64 | cd "`dirname \"$PRG\"`/" >&-
65 | APP_HOME="`pwd -P`"
66 | cd "$SAVED" >&-
67 |
68 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
69 |
70 | # Determine the Java command to use to start the JVM.
71 | if [ -n "$JAVA_HOME" ] ; then
72 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
73 | # IBM's JDK on AIX uses strange locations for the executables
74 | JAVACMD="$JAVA_HOME/jre/sh/java"
75 | else
76 | JAVACMD="$JAVA_HOME/bin/java"
77 | fi
78 | if [ ! -x "$JAVACMD" ] ; then
79 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
80 |
81 | Please set the JAVA_HOME variable in your environment to match the
82 | location of your Java installation."
83 | fi
84 | else
85 | JAVACMD="java"
86 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
87 |
88 | Please set the JAVA_HOME variable in your environment to match the
89 | location of your Java installation."
90 | fi
91 |
92 | # Increase the maximum file descriptors if we can.
93 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
94 | MAX_FD_LIMIT=`ulimit -H -n`
95 | if [ $? -eq 0 ] ; then
96 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
97 | MAX_FD="$MAX_FD_LIMIT"
98 | fi
99 | ulimit -n $MAX_FD
100 | if [ $? -ne 0 ] ; then
101 | warn "Could not set maximum file descriptor limit: $MAX_FD"
102 | fi
103 | else
104 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
105 | fi
106 | fi
107 |
108 | # For Darwin, add options to specify how the application appears in the dock
109 | if $darwin; then
110 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
111 | fi
112 |
113 | # For Cygwin, switch paths to Windows format before running java
114 | if $cygwin ; then
115 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
116 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
158 | function splitJvmOpts() {
159 | JVM_OPTS=("$@")
160 | }
161 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
162 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
163 |
164 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
165 |
--------------------------------------------------------------------------------
/android/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windowz variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/android/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/RNRectangleScannerManager.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner;
2 |
3 | import android.app.Activity;
4 | import com.rectanglescanner.views.MainView;
5 | import com.facebook.react.bridge.WritableMap;
6 | import com.facebook.react.common.MapBuilder;
7 | import com.facebook.react.uimanager.ThemedReactContext;
8 | import com.facebook.react.uimanager.ViewGroupManager;
9 | import com.facebook.react.uimanager.annotations.ReactProp;
10 |
11 |
12 | import javax.annotation.Nullable;
13 | import java.util.Map;
14 |
15 | /**
16 | * View manager exposing the native scanner view ("RNRectangleScanner") to React Native: creates the MainView instance, forwards the JS props (enableTorch, capturedQuality, filterId) to it, and declares the direct events the view emits. Created by Jake on Jan 6, 2020.
17 | */
18 | public class RNRectangleScannerManager extends ViewGroupManager {
19 |
20 | private static final String REACT_CLASS = "RNRectangleScanner"; // component name referenced from JS
21 | private MainView view = null; // NOTE(review): single cached view — appears to assume one scanner instance at a time; confirm
22 |
23 | @Override
24 | public String getName() {
25 | return REACT_CLASS;
26 | }
27 |
28 | @Override
29 | protected MainView createViewInstance(final ThemedReactContext reactContext) {
30 | MainView.createInstance(reactContext, (Activity) reactContext.getBaseContext()); // (re)creates the shared instance bound to the host Activity
31 | view = MainView.getInstance();
32 | return view;
33 | }
34 |
35 | // MARK: Props
36 | @ReactProp(name = "enableTorch", defaultBoolean = false)
37 | public void setEnableTorch(MainView view, Boolean enable) { // toggles the flashlight on the camera view
38 | view.setEnableTorch(enable);
39 | }
40 |
41 | @ReactProp(name = "capturedQuality", defaultDouble = 0.5)
42 | public void setCapturedQuality(MainView view, double quality) { // JPEG quality for captured output images
43 | view.setCapturedQuality(quality);
44 | }
45 |
46 | @ReactProp(name = "filterId", defaultInt = 1)
47 | public void setFilterId(MainView view, int filterId) { // selects the prebuilt image filter by id
48 | view.setFilterId(filterId);
49 | }
50 |
51 | // Life cycle Events
52 | @Override
53 | public @Nullable Map getExportedCustomDirectEventTypeConstants() { // maps each native event to the identically-named JS callback prop
54 | return MapBuilder.of(
55 | "onDeviceSetup", MapBuilder.of("registrationName", "onDeviceSetup"),
56 |
57 | "onPictureTaken", MapBuilder.of("registrationName", "onPictureTaken"),
58 |
59 | "onPictureProcessed", MapBuilder.of("registrationName", "onPictureProcessed"),
60 |
61 | "onErrorProcessingImage", MapBuilder.of("registrationName", "onErrorProcessingImage"),
62 |
63 | "onRectangleDetected", MapBuilder.of("registrationName", "onRectangleDetected"),
64 |
65 | "onTorchChanged", MapBuilder.of("registrationName", "onTorchChanged")
66 | );
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/RNRectangleScannerModule.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner;
2 |
3 | import com.rectanglescanner.views.MainView;
4 | import com.facebook.react.bridge.ReactApplicationContext;
5 | import com.facebook.react.bridge.ReactContextBaseJavaModule;
6 | import com.facebook.react.bridge.ReactMethod;
7 |
8 | /**
9 | * Created by Jake on Jan 6, 2020.
10 | */
11 |
12 | public class RNRectangleScannerModule extends ReactContextBaseJavaModule{
13 |
14 | public RNRectangleScannerModule(ReactApplicationContext reactContext){
15 | super(reactContext);
16 | }
17 |
18 | @Override
19 | public String getName() {
20 | return "RNRectangleScannerManager";
21 | }
22 |
23 | @ReactMethod
24 | public void start(){
25 | MainView view = MainView.getInstance();
26 | view.startCamera();
27 | }
28 |
29 | @ReactMethod
30 | public void stop(){
31 | MainView view = MainView.getInstance();
32 | view.stopCamera();
33 | }
34 |
35 | @ReactMethod
36 | public void cleanup(){
37 | MainView view = MainView.getInstance();
38 | view.cleanupCamera();
39 | }
40 |
41 | @ReactMethod
42 | public void refresh(){
43 | MainView view = MainView.getInstance();
44 | view.stopCamera();
45 | view.startCamera();
46 | }
47 |
48 | @ReactMethod
49 | public void capture(){
50 | MainView view = MainView.getInstance();
51 | view.capture();
52 | }
53 |
54 | @ReactMethod
55 | public void focus() {
56 | MainView view = MainView.getInstance();
57 | view.focusCamera();
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/RectangleScannerPackage.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner;
2 |
3 | import com.facebook.react.ReactPackage;
4 | import com.facebook.react.bridge.JavaScriptModule;
5 | import com.facebook.react.bridge.NativeModule;
6 | import com.facebook.react.bridge.ReactApplicationContext;
7 | import com.facebook.react.uimanager.ViewManager;
8 |
9 | import java.util.Arrays;
10 | import java.util.Collections;
11 | import java.util.List;
12 |
13 | /**
14 | * Created by Jake on Jan 6, 2020.
15 | */
16 | public class RectangleScannerPackage implements ReactPackage {
17 | @Override
18 | public List createNativeModules(ReactApplicationContext reactContext) {
19 | return Arrays.asList(
20 | new RNRectangleScannerModule(reactContext)
21 | );
22 | }
23 |
24 | @Override
25 | public List createViewManagers(ReactApplicationContext reactContext) {
26 | return Arrays.asList(
27 | new RNRectangleScannerManager()
28 | );
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/helpers/CapturedImage.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.helpers;
2 |
3 | import org.opencv.core.Mat;
4 | import org.opencv.core.Point;
5 | import org.opencv.core.Size;
6 |
7 | /**
8 | * Created by Jake on Jan 6, 2020.
9 | */
10 | public class CapturedImage {
11 |
12 | public Mat original;
13 | public Mat processed;
14 | public Point[] previewPoints;
15 | public Size previewSize;
16 | public Size originalSize;
17 |
18 | public Point[] originalPoints;
19 |
20 | public int heightWithRatio;
21 | public int widthWithRatio;
22 |
23 | public CapturedImage(Mat original) {
24 | this.original = original;
25 | }
26 |
27 | public Mat getProcessed() {
28 | return processed;
29 | }
30 |
31 | public CapturedImage setProcessed(Mat processed) {
32 | this.processed = processed;
33 | return this;
34 | }
35 |
36 | public void release() {
37 | if (processed != null) {
38 | processed.release();
39 | }
40 | if (original != null) {
41 | original.release();
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/helpers/ImageProcessor.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.helpers;
2 |
3 | import android.content.Context;
4 | import android.content.SharedPreferences;
5 | import android.os.Handler;
6 | import android.os.Looper;
7 | import android.os.Message;
8 | import android.preference.PreferenceManager;
9 | import android.util.Log;
10 |
11 | import com.rectanglescanner.views.RectangleDetectionController;
12 | import com.rectanglescanner.helpers.ImageProcessorMessage;
13 | import com.rectanglescanner.helpers.Quadrilateral;
14 | import com.rectanglescanner.helpers.CapturedImage;
15 |
16 | import android.view.Surface;
17 |
18 | import org.opencv.core.Core;
19 | import org.opencv.core.CvType;
20 | import org.opencv.core.Mat;
21 | import org.opencv.core.MatOfPoint;
22 | import org.opencv.core.MatOfPoint2f;
23 | import org.opencv.core.Point;
24 | import org.opencv.core.Size;
25 | import org.opencv.imgcodecs.Imgcodecs;
26 | import org.opencv.imgproc.Imgproc;
27 |
28 | import android.os.Bundle;
29 | import java.util.ArrayList;
30 | import java.util.Arrays;
31 | import java.util.Collections;
32 | import java.util.Comparator;
33 |
34 | import com.facebook.react.bridge.Arguments;
35 |
36 | /**
37 | Created by Jake on Jan 6, 2020.
38 |
39 | Async processes either the image preview frame to detect rectangles, or
40 | the captured image to crop and apply filters.
41 | */
42 | public class ImageProcessor extends Handler {
43 |
44 | private static final String TAG = "ImageProcessor";
45 | private final RectangleDetectionController mMainActivity;
46 | private Quadrilateral lastDetectedRectangle = null;
47 |
48 | public ImageProcessor(Looper looper, RectangleDetectionController mainActivity, Context context) {
49 | super(looper);
50 | this.mMainActivity = mainActivity;
51 | SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(context);
52 | }
53 |
54 | /**
55 | Receives an event message to handle async
56 | */
57 | public void handleMessage(Message msg) {
58 | if (msg.obj.getClass() == ImageProcessorMessage.class) {
59 |
60 | ImageProcessorMessage obj = (ImageProcessorMessage) msg.obj;
61 |
62 | String command = obj.getCommand();
63 |
64 | Log.d(TAG, "Message Received: " + command + " - " + obj.getObj().toString());
65 | if (command.equals("previewFrame")) {
66 | processPreviewFrame((Mat) obj.getObj());
67 | } else if (command.equals("pictureTaken")) {
68 | processCapturedImage((Mat) obj.getObj());
69 | }
70 | }
71 | }
72 |
73 | /**
74 | Detect a rectangle in the current frame from the camera video
75 | */
76 | private void processPreviewFrame(Mat frame) {
77 | rotateImageForScreen(frame);
78 | detectRectangleInFrame(frame);
79 | frame.release();
80 | mMainActivity.setImageProcessorBusy(false);
81 | }
82 |
83 | /**
84 | Process a single frame from the camera video
85 | */
86 | private void processCapturedImage(Mat capturedImage) {
87 | // Mat capturedImage = Imgcodecs.imdecode(picture, Imgcodecs.IMREAD_UNCHANGED);
88 | // picture.release();
89 |
90 | Log.d(TAG, "processCapturedImage - imported image " + capturedImage.size().width + "x" + capturedImage.size().height);
91 |
92 | rotateImageForScreen(capturedImage);
93 |
94 | CapturedImage doc = cropImageToLatestQuadrilateral(capturedImage);
95 |
96 | mMainActivity.onProcessedCapturedImage(doc);
97 | doc.release();
98 | capturedImage.release();
99 |
100 | mMainActivity.setImageProcessorBusy(false);
101 | }
102 |
103 | /**
104 | Detects a rectangle from the image and sets the last detected rectangle
105 | */
106 | private void detectRectangleInFrame(Mat inputRgba) {
107 | ArrayList contours = findContours(inputRgba);
108 | Size srcSize = inputRgba.size();
109 | this.lastDetectedRectangle = getQuadrilateral(contours, srcSize);
110 | Bundle data = new Bundle();
111 | if (this.lastDetectedRectangle != null) {
112 | Bundle quadMap = this.lastDetectedRectangle.toBundle();
113 | data.putBundle("detectedRectangle", quadMap);
114 | } else {
115 | data.putBoolean("detectedRectangle", false);
116 | }
117 |
118 | mMainActivity.rectangleWasDetected(Arguments.fromBundle(data));
119 | }
120 |
121 | /**
122 | Crops the image to the latest detected rectangle and fixes perspective
123 | */
124 | private CapturedImage cropImageToLatestQuadrilateral(Mat capturedImage) {
125 | applyFilters(capturedImage);
126 |
127 | Mat doc;
128 | if (this.lastDetectedRectangle != null) {
129 | Mat croppedCapturedImage = this.lastDetectedRectangle.cropImageToRectangleSize(capturedImage);
130 | doc = fourPointTransform(croppedCapturedImage, this.lastDetectedRectangle.getPointsForSize(croppedCapturedImage.size()));
131 | croppedCapturedImage.release();
132 | } else {
133 | doc = new Mat(capturedImage.size(), CvType.CV_8UC4);
134 | capturedImage.copyTo(doc);
135 | }
136 |
137 | Core.flip(doc.t(), doc, 0);
138 | Core.flip(capturedImage.t(), capturedImage, 0);
139 | CapturedImage sd = new CapturedImage(capturedImage);
140 |
141 | sd.originalSize = capturedImage.size();
142 | sd.heightWithRatio = Double.valueOf(sd.originalSize.width).intValue();
143 | sd.widthWithRatio = Double.valueOf(sd.originalSize.height).intValue();
144 | return sd.setProcessed(doc);
145 | }
146 |
147 | private Quadrilateral getQuadrilateral(ArrayList contours, Size srcSize) {
148 |
149 | int height = Double.valueOf(srcSize.height).intValue();
150 | int width = Double.valueOf(srcSize.width).intValue();
151 | Size size = new Size(width, height);
152 |
153 | Log.i(TAG, "Size----->" + size);
154 | for (MatOfPoint c : contours) {
155 | MatOfPoint2f c2f = new MatOfPoint2f(c.toArray());
156 | double peri = Imgproc.arcLength(c2f, true);
157 | MatOfPoint2f approx = new MatOfPoint2f();
158 | Imgproc.approxPolyDP(c2f, approx, 0.02 * peri, true);
159 |
160 | Point[] points = approx.toArray();
161 |
162 | // select biggest 4 angles polygon
163 | // if (points.length == 4) {
164 | Point[] foundPoints = sortPoints(points);
165 |
166 | if (insideArea(foundPoints, size)) {
167 |
168 | return new Quadrilateral(c, foundPoints, new Size(srcSize.width, srcSize.height));
169 | }
170 | // }
171 | }
172 |
173 | return null;
174 | }
175 |
176 | private Point[] sortPoints(Point[] src) {
177 |
178 | ArrayList srcPoints = new ArrayList<>(Arrays.asList(src));
179 |
180 | Point[] result = { null, null, null, null };
181 |
182 | Comparator sumComparator = new Comparator() {
183 | @Override
184 | public int compare(Point lhs, Point rhs) {
185 | return Double.compare(lhs.y + lhs.x, rhs.y + rhs.x);
186 | }
187 | };
188 |
189 | Comparator diffComparator = new Comparator() {
190 |
191 | @Override
192 | public int compare(Point lhs, Point rhs) {
193 | return Double.compare(lhs.y - lhs.x, rhs.y - rhs.x);
194 | }
195 | };
196 |
197 | // top-left corner = minimal sum
198 | result[0] = Collections.min(srcPoints, sumComparator);
199 |
200 | // bottom-right corner = maximal sum
201 | result[2] = Collections.max(srcPoints, sumComparator);
202 |
203 | // top-right corner = minimal difference
204 | result[1] = Collections.min(srcPoints, diffComparator);
205 |
206 | // bottom-left corner = maximal difference
207 | result[3] = Collections.max(srcPoints, diffComparator);
208 |
209 | return result;
210 | }
211 |
212 | private boolean insideArea(Point[] rp, Size size) {
213 |
214 | int width = Double.valueOf(size.width).intValue();
215 | int height = Double.valueOf(size.height).intValue();
216 |
217 | int minimumSize = width / 10;
218 |
219 | boolean isANormalShape = rp[0].x != rp[1].x && rp[1].y != rp[0].y && rp[2].y != rp[3].y && rp[3].x != rp[2].x;
220 | boolean isBigEnough = ((rp[1].x - rp[0].x >= minimumSize) && (rp[2].x - rp[3].x >= minimumSize)
221 | && (rp[3].y - rp[0].y >= minimumSize) && (rp[2].y - rp[1].y >= minimumSize));
222 |
223 | double leftOffset = rp[0].x - rp[3].x;
224 | double rightOffset = rp[1].x - rp[2].x;
225 | double bottomOffset = rp[0].y - rp[1].y;
226 | double topOffset = rp[2].y - rp[3].y;
227 |
228 | boolean isAnActualRectangle = ((leftOffset <= minimumSize && leftOffset >= -minimumSize)
229 | && (rightOffset <= minimumSize && rightOffset >= -minimumSize)
230 | && (bottomOffset <= minimumSize && bottomOffset >= -minimumSize)
231 | && (topOffset <= minimumSize && topOffset >= -minimumSize));
232 |
233 | return isANormalShape && isAnActualRectangle && isBigEnough;
234 | }
235 |
236 | private Mat fourPointTransform(Mat src, Point[] pts) {
237 | Point tl = pts[0];
238 | Point tr = pts[1];
239 | Point br = pts[2];
240 | Point bl = pts[3];
241 |
242 | double widthA = Math.sqrt(Math.pow(br.x - bl.x, 2) + Math.pow(br.y - bl.y, 2));
243 | double widthB = Math.sqrt(Math.pow(tr.x - tl.x, 2) + Math.pow(tr.y - tl.y, 2));
244 |
245 | double dw = Math.max(widthA, widthB);
246 | int maxWidth = Double.valueOf(dw).intValue();
247 |
248 | double heightA = Math.sqrt(Math.pow(tr.x - br.x, 2) + Math.pow(tr.y - br.y, 2));
249 | double heightB = Math.sqrt(Math.pow(tl.x - bl.x, 2) + Math.pow(tl.y - bl.y, 2));
250 |
251 | double dh = Math.max(heightA, heightB);
252 | int maxHeight = Double.valueOf(dh).intValue();
253 |
254 | Mat doc = new Mat(maxHeight, maxWidth, CvType.CV_8UC4);
255 |
256 | Mat src_mat = new Mat(4, 1, CvType.CV_32FC2);
257 | Mat dst_mat = new Mat(4, 1, CvType.CV_32FC2);
258 |
259 | src_mat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y,
260 | bl.x, bl.y);
261 | dst_mat.put(0, 0, 0.0, 0.0, dw, 0.0, dw, dh, 0.0, dh);
262 |
263 | Mat m = Imgproc.getPerspectiveTransform(src_mat, dst_mat);
264 |
265 | Imgproc.warpPerspective(src, doc, m, doc.size());
266 |
267 | return doc;
268 | }
269 |
270 | private ArrayList findContours(Mat src) {
271 |
272 | Mat grayImage;
273 | Mat cannedImage;
274 | Mat resizedImage;
275 |
276 | int height = Double.valueOf(src.size().height).intValue();
277 | int width = Double.valueOf(src.size().width).intValue();
278 | Size size = new Size(width, height);
279 |
280 | resizedImage = new Mat(size, CvType.CV_8UC4);
281 | grayImage = new Mat(size, CvType.CV_8UC4);
282 | cannedImage = new Mat(size, CvType.CV_8UC1);
283 |
284 | Imgproc.resize(src, resizedImage, size);
285 | Imgproc.cvtColor(resizedImage, grayImage, Imgproc.COLOR_RGBA2GRAY, 4);
286 | Imgproc.GaussianBlur(grayImage, grayImage, new Size(5, 5), 0);
287 | Imgproc.Canny(grayImage, cannedImage, 80, 100, 3, false);
288 |
289 | ArrayList contours = new ArrayList<>();
290 | Mat hierarchy = new Mat();
291 |
292 | Imgproc.findContours(cannedImage, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
293 |
294 | hierarchy.release();
295 |
296 | Collections.sort(contours, new Comparator() {
297 |
298 | @Override
299 | public int compare(MatOfPoint lhs, MatOfPoint rhs) {
300 | return Double.compare(Imgproc.contourArea(rhs), Imgproc.contourArea(lhs));
301 | }
302 | });
303 |
304 | resizedImage.release();
305 | grayImage.release();
306 | cannedImage.release();
307 |
308 | return contours;
309 | }
310 |
311 | /*!
312 | Applies filters to the image based on the set filter
313 | */
314 | public void applyFilters(Mat image) {
315 | int filterId = this.mMainActivity.getFilterId();
316 | switch (filterId) {
317 | case 1: {
318 | // original image
319 | break;
320 | }
321 | case 2: {
322 | applyGreyscaleFilterToImage(image);
323 | break;
324 | }
325 | case 3: {
326 | applyColorFilterToImage(image);
327 | break;
328 | }
329 | case 4: {
330 | applyBlackAndWhiteFilterToImage(image);
331 | break;
332 | }
333 | default:
334 | // original image
335 | }
336 | }
337 |
338 | /*!
339 | Slightly enhances the black and white image
340 | */
341 | public Mat applyGreyscaleFilterToImage(Mat image)
342 | {
343 | Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2GRAY);
344 | return image;
345 | }
346 |
347 | /*!
348 | Slightly enhances the black and white image
349 | */
350 | public Mat applyBlackAndWhiteFilterToImage(Mat image)
351 | {
352 | Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2GRAY);
353 | image.convertTo(image, -1, 1, 10);
354 | return image;
355 | }
356 |
357 | /*!
358 | Slightly enhances the color on the image
359 | */
360 | public Mat applyColorFilterToImage(Mat image)
361 | {
362 | image.convertTo(image, -1, 1.2, 0);
363 | return image;
364 | }
365 |
366 |
367 | public void rotateImageForScreen(Mat image) {
368 | switch (this.mMainActivity.lastDetectedRotation) {
369 | case Surface.ROTATION_90: {
370 | // Do nothing
371 | break;
372 | }
373 | case Surface.ROTATION_180: {
374 | Core.flip(image.t(), image, 0);
375 | break;
376 | }
377 | case Surface.ROTATION_270: {
378 | Core.flip(image, image, 0);
379 | Core.flip(image, image, 1);
380 | break;
381 | }
382 | case Surface.ROTATION_0:
383 | default: {
384 | Core.flip(image.t(), image, 1);
385 | break;
386 | }
387 | }
388 | }
389 | }
390 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/helpers/ImageProcessorMessage.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.helpers;
2 |
/**
 * Created by Jake on Jan 6, 2020.
 *
 * Envelope pairing a processor command ("previewFrame", "pictureTaken")
 * with its payload for delivery to the ImageProcessor handler.
 */
public class ImageProcessorMessage {

  private String command;
  private Object obj;

  /**
   * @param command the processor command to run
   * @param obj     the payload for the command (typically an OpenCV Mat)
   */
  public ImageProcessorMessage(String command, Object obj) {
    // Assign fields directly: the original delegated to the public setters,
    // which is unsafe should a subclass ever override them (the overrides
    // would run before the subclass is fully initialized).
    this.command = command;
    this.obj = obj;
  }

  public String getCommand() {
    return command;
  }

  public void setCommand(String command) {
    this.command = command;
  }

  public Object getObj() {
    return obj;
  }

  public void setObj(Object obj) {
    this.obj = obj;
  }
}
33 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/helpers/Quadrilateral.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.helpers;
2 |
3 | import org.opencv.core.MatOfPoint;
4 | import org.opencv.core.Rect;
5 | import org.opencv.core.Mat;
6 | import org.opencv.core.Point;
7 | import org.opencv.core.Size;
8 |
9 | import android.os.Bundle;
10 |
/**
 * Created by Jake on Jan 6, 2020.
 * Represents the detected rectangle from an image
 */
public class Quadrilateral {
  // OpenCV contour the rectangle was approximated from.
  public MatOfPoint contour;
  // The four corner points (ordering produced by ImageProcessor.sortPoints).
  public Point[] points;
  // Size of the source frame the points are expressed in.
  public Size sourceSize;

  public Quadrilateral(MatOfPoint contour, Point[] points, Size sourceSize) {
    this.contour = contour;
    this.points = points;
    this.sourceSize = sourceSize;
  }

  /**
   Crops the edges of the image to the aspect ratio of the detected rectangle.
   The crop is centered: whichever dimension is proportionally too large is
   trimmed equally on both sides.
   */
  public Mat cropImageToRectangleSize(Mat image) {
    Size imageSize = image.size();
    double rectangleRatio = this.sourceSize.height / this.sourceSize.width;
    double imageRatio = imageSize.height / imageSize.width;

    double cropHeight = imageSize.height;
    double cropWidth = imageSize.width;
    // Used to center the crop in the middle
    int rectangleXCoord = 0;
    int rectangleYCoord = 0;
    if (imageRatio > rectangleRatio) {
      // Height should be cropped
      cropHeight = cropWidth * rectangleRatio;
      rectangleYCoord = (int)((imageSize.height - cropHeight) / 2);
    } else {
      // Width should be cropped
      cropWidth = cropHeight / rectangleRatio;
      rectangleXCoord = (int)((imageSize.width - cropWidth) / 2);
    }

    // Mat(Mat, Rect) is a submatrix header that shares pixel data with
    // `image`; no pixels are copied here.
    Rect rectCrop = new Rect(rectangleXCoord, rectangleYCoord, (int)cropWidth, (int)cropHeight);
    return new Mat(image, rectCrop);
  }

  /**
   Returns the points of the rectangle scaled to the given size.
   NOTE(review): the scale factor is derived from height only, so this assumes
   outputSize preserves the source aspect ratio — confirm for callers.
   */
  public Point[] getPointsForSize(Size outputSize) {
    // Exact float comparison is just a fast path for the unscaled case.
    double scale = outputSize.height / this.sourceSize.height;
    if (scale == 1) {
      return this.points;
    }

    Point[] scaledPoints = new Point[4];
    for (int i = 0;i < this.points.length;i++ ) {
      scaledPoints[i] = this.points[i].clone();
      scaledPoints[i].x *= scale;
      scaledPoints[i].y *= scale;
    }

    return scaledPoints;
  }


  /**
   Returns the rectangle as a bundle object (corner coordinates plus the
   source frame dimensions).
   NOTE(review): corner labels do not follow the sortPoints index order
   (index 0 is emitted as "topRight", index 3 as "topLeft", ...); this looks
   like a deliberate remap for the rotated preview — confirm against the JS
   consumer before changing.
   */
  public Bundle toBundle() {
    Bundle quadMap = new Bundle();

    Bundle bottomLeft = new Bundle();
    bottomLeft.putDouble("x", this.points[2].x);
    bottomLeft.putDouble("y", this.points[2].y);
    quadMap.putBundle("bottomLeft", bottomLeft);

    Bundle bottomRight = new Bundle();
    bottomRight.putDouble("x", this.points[1].x);
    bottomRight.putDouble("y", this.points[1].y);
    quadMap.putBundle("bottomRight", bottomRight);

    Bundle topLeft = new Bundle();
    topLeft.putDouble("x", this.points[3].x);
    topLeft.putDouble("y", this.points[3].y);
    quadMap.putBundle("topLeft", topLeft);

    Bundle topRight = new Bundle();
    topRight.putDouble("x", this.points[0].x);
    topRight.putDouble("y", this.points[0].y);
    quadMap.putBundle("topRight", topRight);

    Bundle dimensions = new Bundle();
    dimensions.putDouble("height", this.sourceSize.height);
    dimensions.putDouble("width", this.sourceSize.width);
    quadMap.putBundle("dimensions", dimensions);

    return quadMap;
  }
}
107 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/views/CameraDeviceController.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.views;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.content.pm.PackageManager;
6 | import android.graphics.Bitmap;
7 | import android.graphics.BitmapFactory;
8 | import android.graphics.ImageFormat;
9 | import android.hardware.Camera;
10 | import android.hardware.Camera.PictureCallback;
11 | import android.media.AudioManager;
12 | import android.media.MediaActionSound;
13 | import android.os.Build;
14 | import android.util.AttributeSet;
15 | import android.util.Log;
16 | import android.view.Display;
17 | import android.view.Surface;
18 | import android.view.SurfaceHolder;
19 | import android.view.SurfaceView;
20 | import android.view.View;
21 | import android.view.WindowManager;
22 | import android.content.res.Configuration;
23 | import android.widget.FrameLayout;
24 |
25 | import com.rectanglescanner.R;
26 | import com.facebook.react.bridge.WritableMap;
27 | import com.facebook.react.bridge.WritableNativeMap;
28 |
29 | import org.opencv.android.JavaCameraView;
30 | import org.opencv.android.Utils;
31 | import org.opencv.core.CvType;
32 | import org.opencv.core.Mat;
33 | import org.opencv.core.Size;
34 | import org.opencv.imgproc.Imgproc;
35 | import java.util.List;
36 |
37 |
38 | /**
39 | Created by Jake on Jan 6, 2020.
40 |
41 | Handles Generic camera device setup and capture
42 | */
43 | public class CameraDeviceController extends JavaCameraView implements PictureCallback {
  public static final String TAG = "CameraDeviceController";
  protected Context mContext;
  private SurfaceView mSurfaceView;
  private SurfaceHolder mSurfaceHolder;
  // NOTE(review): final and always false — appears to be a leftover toggle
  // for a rotation workaround; confirm before removing.
  protected final boolean mBugRotate = false;
  protected boolean safeToTakePicture;
  protected Activity mActivity;
  // This view acts as its own picture callback (assigned in the constructor).
  private PictureCallback pCallback;
  protected Boolean enableTorch = false;
  // Last screen rotation observed; read by ImageProcessor.rotateImageForScreen.
  public int lastDetectedRotation = Surface.ROTATION_0;
  protected View mView = null;


  protected boolean cameraIsSetup = false;
  // True until startCamera() succeeds; guards duplicate preview starts.
  protected boolean isStopped = true;
  // Device capability info reported out via deviceWasSetup().
  private WritableMap deviceConfiguration = new WritableNativeMap();
  private int captureDevice = -1;
  private boolean imageProcessorBusy = true;
  private boolean cameraRequiresManualAutoFocus = false;

  // NOTE(review): never assigned or read in the visible code — confirm whether
  // this static self-reference is still needed.
  private static CameraDeviceController mThis;
65 |
  // Constructor used when the view is inflated from XML attributes.
  public CameraDeviceController(Context context, AttributeSet attrs) {
    super(context, attrs);
  }
69 |
  /**
   * Programmatic constructor.
   *
   * @param context     host context
   * @param numCam      camera index forwarded to JavaCameraView
   * @param activity    host activity (used for display/rotation queries)
   * @param frameLayout root layout searched later for the surface view
   */
  public CameraDeviceController(Context context, Integer numCam, Activity activity, FrameLayout frameLayout) {
    super(context, numCam);
    this.mContext = context;
    this.mActivity = activity;
    pCallback = this;
    mView = frameLayout;

    // NOTE(review): the returned service object is discarded — this call has
    // no visible effect; confirm whether it is still needed.
    context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
  }
79 |
80 | //================================================================================
81 | // Setters
82 | //================================================================================
83 |
  /**
   Toggles the flash on the camera device
   */
  public void setEnableTorch(boolean enableTorch) {
    this.enableTorch = enableTorch;

    if (mCamera != null) {
      Camera.Parameters p = mCamera.getParameters();
      p.setFlashMode(enableTorch ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
      mCamera.setParameters(p);

    }

    // The hook fires even when no camera is attached yet, so listeners stay
    // in sync with the requested state.
    torchWasChanged(enableTorch);
  }
  // Subclass hook: invoked whenever the torch state is set.
  protected void torchWasChanged(boolean torchEnabled) {}
100 |
101 |
  /**
   Cleans up the camera view
   */
  public void cleanupCamera() {
    if (mCamera != null) {
      // Stop the preview and drop the frame callback before releasing the
      // hardware so no frames arrive on a released camera.
      mCamera.stopPreview();
      mCamera.setPreviewCallback(null);
      mCamera.release();
      mCamera = null;
      this.cameraIsSetup = false;
    }
  }
114 |
  /**
   Stops and restarts the camera
   NOTE(review): private and not called from the visible code (the RN module
   implements refresh itself) — confirm whether this is still used.
   */
  private void refreshCamera() {
    stopCamera();
    startCamera();
  }
122 |
  /**
   Starts the capture session
   */
  public void startCamera() {
    Log.d(TAG, "Starting preview");
    if (this.isStopped) {
      try {
        // Lazily build the surface + camera on first start.
        if (!this.cameraIsSetup) {
          setupCameraView();
        }
        mCamera.setPreviewDisplay(mSurfaceHolder);
        mCamera.startPreview();
        mCamera.setPreviewCallback(this);
        this.isStopped = false;
      } catch (Exception e) {
        // Best-effort: failures (camera busy, no permission, ...) are only
        // logged; isStopped stays true so a later call can retry.
        Log.d(TAG, "Error starting preview: " + e);
      }
    }
  }
142 |
  /**
   Stops the capture session
   */
  public void stopCamera() {
    Log.d(TAG, "Stopping preview");
    if (!this.isStopped) {
      try {
        if (mCamera != null) {
          mCamera.stopPreview();
        }
        // Marked stopped even when mCamera is null so startCamera can re-run
        // its setup path.
        this.isStopped = true;
      }
      catch (Exception e) {
        // Best-effort: stop failures are only logged.
        Log.d(TAG, "Error stopping preview: " + e);
      }
    }
  }
160 |
  /**
   Tell the camera to focus
   NOTE(review): throws NPE if called before the camera is acquired — confirm
   callers only invoke this after startCamera() succeeds.
   */
  public void focusCamera() {
    Log.d(TAG, "Autofocusing");
    mCamera.autoFocus(null);
  }
168 |
  /**
   Sets the device configuration flash setting
   @param isAvailable true when the hardware reports a flash unit
   */
  public void setDeviceConfigurationFlashAvailable(boolean isAvailable) {
    this.deviceConfiguration.putBoolean("flashIsAvailable", isAvailable);
  }
175 |
  /**
   Sets the device configuration permission setting
   @param granted true when camera permission has been granted
   */
  public void setDeviceConfigurationPermissionToUseCamera(boolean granted){
    this.deviceConfiguration.putBoolean("permissionToUseCamera", granted);
  }
182 |
  /**
   Sets the device configuration camera availablility
   @param isAvailable true when a usable camera device was found
   */
  public void setDeviceConfigurationHasCamera(boolean isAvailable){
    this.deviceConfiguration.putBoolean("hasCamera", isAvailable);
  }
189 |
  /**
   Sets the percent size of the camera preview
   @param heightPercent fraction (0.0-1.0) of the view height the preview fills
   @param widthPercent  fraction (0.0-1.0) of the view width the preview fills
   */
  public void setDeviceConfigurationPreviewPercentSize(double heightPercent, double widthPercent) {
    this.deviceConfiguration.putDouble("previewHeightPercent", heightPercent);
    this.deviceConfiguration.putDouble("previewWidthPercent", widthPercent);
  }
197 |
  /**
   Sets the inital device configuration
   */
  public void resetDeviceConfiguration()
  {
    // Fresh map each time; defaults assume the most restricted device until
    // setupCamera() proves otherwise.
    this.deviceConfiguration = new WritableNativeMap();
    setDeviceConfigurationFlashAvailable(false);
    setDeviceConfigurationPermissionToUseCamera(false);
    setDeviceConfigurationHasCamera(false);
    setDeviceConfigurationPreviewPercentSize(1.0, 1.0);
  }
209 |
  /**
   Called after the camera and session are set up. This lets you check if a
   camera is found and permission is granted to use it.
   */
  public void commitDeviceConfiguration() {
    // Hands the accumulated configuration to the subclass hook below.
    deviceWasSetup(this.deviceConfiguration);
  }
  // Subclass hook: receives the committed device configuration.
  protected void deviceWasSetup(WritableMap config) {}
218 |
219 | //================================================================================
220 | // Getters
221 | //================================================================================
222 |
223 | private int getCameraDevice() {
224 | int cameraId = -1;
225 | // Search for the back facing camera
226 | // get the number of cameras
227 | int numberOfCameras = Camera.getNumberOfCameras();
228 | // for every camera check
229 | for (int i = 0; i < numberOfCameras; i++) {
230 | Camera.CameraInfo info = new Camera.CameraInfo();
231 | Camera.getCameraInfo(i, info);
232 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
233 | cameraId = i;
234 | break;
235 | }
236 | cameraId = i;
237 | }
238 | return cameraId;
239 | }
240 |
241 | /**
242 | Given a list of resolution sizes and a ratio to fit to, it will find the highest resolution
243 | that fits the ratio the best.
244 | */
245 | private Camera.Size getOptimalResolution(float ratioToFitTo, List resolutionList) {
246 | int maxPixels = 0;
247 | int ratioMaxPixels = 0;
248 | double bestRatioDifference = 5;
249 | Camera.Size currentMaxRes = null;
250 | Camera.Size ratioCurrentMaxRes = null;
251 | for (Camera.Size r : resolutionList) {
252 | float pictureRatio = (float) r.width / r.height;
253 | Log.d(TAG, "supported resolution: " + r.width + "x" + r.height + " ratio: " + pictureRatio + " ratioToFitTo: " + ratioToFitTo);
254 | int resolutionPixels = r.width * r.height;
255 | double ratioDifference = Math.abs(ratioToFitTo - pictureRatio);
256 | if (resolutionPixels > ratioMaxPixels && ratioDifference < bestRatioDifference) {
257 | ratioMaxPixels = resolutionPixels;
258 | ratioCurrentMaxRes = r;
259 | bestRatioDifference = ratioDifference;
260 | }
261 |
262 | if (resolutionPixels > maxPixels) {
263 | maxPixels = resolutionPixels;
264 | currentMaxRes = r;
265 | }
266 | }
267 |
268 | if (ratioCurrentMaxRes != null) {
269 |
270 | Log.d(TAG, "Max supported resolution with aspect ratio: " + ratioCurrentMaxRes.width + "x"
271 | + ratioCurrentMaxRes.height);
272 | return ratioCurrentMaxRes;
273 |
274 | }
275 |
276 | return currentMaxRes;
277 | }
278 |
279 | //================================================================================
280 | // Setup
281 | //================================================================================
282 |
283 |
  /**
   Creates a session for the camera device and outputs it to a preview view.
   @note Called on view did load
   */
  public void setupCameraView()
  {
    resetDeviceConfiguration();
    if (mSurfaceView == null) {
      mSurfaceView = mView.findViewById(R.id.surfaceView);
      mSurfaceHolder = this.getHolder();
      mSurfaceHolder.addCallback(this);
      // NOTE(review): deprecated and ignored on modern Android — presumably
      // kept for very old API levels; confirm the minSdk still needs it.
      mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }
    setupCamera();
    commitDeviceConfiguration();
    // [self listenForOrientationChanges];
    this.cameraIsSetup = true;
  }
302 |
303 |
304 |
  /**
   Sets up the camera hardware: flash, focus mode, picture format, display
   orientation, and the best preview/picture resolutions for the screen's
   aspect ratio. No-ops if the camera could not be opened.
   */
  public void setupCamera() {
    // setupCaptureDevice also records the "has camera" device state.
    if (!setupCaptureDevice()) {
      return;
    }

    Camera.Parameters param;
    param = mCamera.getParameters();

    PackageManager pm = mActivity.getPackageManager();

    // Start with flash off; the torch is toggled separately.
    if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
      param.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
    }

    // Prefer continuous focus. Falling back to plain auto focus means
    // captureImageLater() must run an explicit autoFocus pass before capture.
    if (param.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
      param.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    } else if (param.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
      param.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
      cameraRequiresManualAutoFocus = true;
    }

    param.setPictureFormat(ImageFormat.JPEG);
    mCamera.setDisplayOrientation(getScreenRotationOnPhone());

    // Real (full) screen size; getRealSize requires API 17+.
    Display display = mActivity.getWindowManager().getDefaultDisplay();
    android.graphics.Point size = new android.graphics.Point();
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
      display.getRealSize(size);
    }

    // Normalize to portrait: short edge = width, long edge = height.
    int displayWidth = Math.min(size.y, size.x);
    int displayHeight = Math.max(size.y, size.x);
    float displayRatio = (float) displayHeight / displayWidth;

    // Pick the preview resolution that best matches the screen's aspect ratio.
    Camera.Size pSize = getOptimalResolution(displayRatio, getResolutionList());
    param.setPreviewSize(pSize.width, pSize.height);
    param.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
    float previewRatio = (float) pSize.width / pSize.height;
    setDevicePreviewSize(previewRatio);

    // Match the still-capture resolution to the chosen preview's aspect ratio.
    Camera.Size maxRes = getOptimalResolution(previewRatio, getPictureResolutionList());
    if (maxRes != null) {
      param.setPictureSize(maxRes.width, maxRes.height);
      Log.d(TAG, "max supported picture resolution: " + maxRes.width + "x" + maxRes.height);
    }

    try {
      mCamera.setParameters(param);
      setDeviceConfigurationPermissionToUseCamera(true);
      safeToTakePicture = true;
    } catch (Exception e) {
      // Some devices reject otherwise-"supported" parameter combinations.
      Log.d(TAG, "failed to initialize the camera settings");
    }
  }
362 |
363 | /**
364 | Sets the surface preview ratio size. Some android devices will have a different
365 | sized preview than their full screen size so this allows for some size adjusting
366 | so the preview's aspect ratio is intact
367 | */
368 | public void setDevicePreviewSize(float previewRatio) {
369 | Display display = mActivity.getWindowManager().getDefaultDisplay();
370 | android.graphics.Point size = new android.graphics.Point();
371 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
372 | display.getRealSize(size);
373 | }
374 |
375 | int displayWidth = Math.min(size.y, size.x);
376 | int displayHeight = Math.max(size.y, size.x);
377 |
378 | float displayRatio = (float) displayHeight / displayWidth;
379 |
380 | int previewHeight = displayHeight;
381 | int previewWidth = displayWidth;
382 |
383 | int sizeY = size.y;
384 | int sizeX = size.x;
385 | if (this.lastDetectedRotation == Surface.ROTATION_90 || this.lastDetectedRotation == Surface.ROTATION_270) {
386 | sizeY = size.x;
387 | sizeX = size.y;
388 | }
389 |
390 | if (displayRatio > previewRatio) {
391 | // Adjust height
392 | previewHeight = (int) ((float) sizeY / displayRatio * previewRatio);
393 | } else if (displayRatio < previewRatio) {
394 | // Adjust Width
395 | previewWidth = (int) ((float) sizeX * displayRatio / previewRatio);
396 | }
397 |
398 |
399 | double percentOfScreenSizeHeight = (double) previewHeight / displayHeight;
400 | double percentOfScreenSizeWidth = (double) previewWidth / displayWidth;
401 | setDeviceConfigurationPreviewPercentSize(percentOfScreenSizeHeight, percentOfScreenSizeWidth);
402 | }
403 |
404 |
405 | /**
406 | Finds a physical camera, configures it, and sets the captureDevice property to it
407 | @return boolean if the camera was found and opened correctly
408 | */
409 | public boolean setupCaptureDevice() {
410 | this.captureDevice = getCameraDevice();
411 |
412 | try {
413 | int cameraId = getCameraDevice();
414 | mCamera = Camera.open(cameraId);
415 | } catch (RuntimeException e) {
416 | System.err.println(e);
417 | return false;
418 | }
419 | setDeviceConfigurationHasCamera(true);
420 |
421 | PackageManager pm = mActivity.getPackageManager();
422 | if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
423 | setDeviceConfigurationFlashAvailable(true);
424 | }
425 | return true;
426 | }
427 |
428 | //================================================================================
429 | // Capture Image
430 | //================================================================================
431 |
432 |
433 | public void captureImageLater() {
434 | PackageManager pm = mActivity.getPackageManager();
435 | if (this.safeToTakePicture) {
436 |
437 | this.safeToTakePicture = false;
438 |
439 | try {
440 | if (cameraRequiresManualAutoFocus) {
441 | mCamera.autoFocus(new Camera.AutoFocusCallback() {
442 | @Override
443 | public void onAutoFocus(boolean success, Camera camera) {
444 | if (success) {
445 | takePicture();
446 | } else {
447 | onPictureFailed();
448 | }
449 | }
450 | });
451 | } else {
452 | takePicture();
453 | }
454 | } catch (Exception e) {
455 | onPictureFailed();
456 | }
457 | }
458 | }
459 |
  // Captures a still frame; pCallback (onPictureTaken) receives the JPEG bytes.
  // Plays the shutter click unless the device ringer is silenced.
  private void takePicture() {
    mCamera.takePicture(null, null, pCallback);
    makeShutterSound();
  }
464 |
  // Called when capturing throws or the auto-focus pass fails: cancels any
  // pending focus and re-arms safeToTakePicture for the next attempt.
  private void onPictureFailed() {
    Log.d(TAG, "failed to capture image");
    mCamera.cancelAutoFocus();
    this.safeToTakePicture = true;
  }
470 |
471 | /**
472 | Responds to the capture image call. It will apply a few filters and call handleCapturedImage which can be overrided for more processing
473 | */
474 | @Override
475 | public void onPictureTaken(byte[] data, Camera camera) {
476 | setEnableTorch(false);
477 | this.safeToTakePicture = true;
478 |
479 | Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
480 |
481 | Mat picture = new Mat();
482 | Bitmap bmp32 = bitmap.copy(Bitmap.Config.ARGB_8888, true);
483 | Utils.bitmapToMat(bmp32, picture);
484 |
485 | Mat mat = new Mat();
486 | Imgproc.cvtColor(picture, mat, Imgproc.COLOR_BGR2RGB, 4);
487 |
488 | handleCapturedImage(mat);
489 | }
  // Override hook: receives the captured frame (RGB Mat) for further processing.
  public void handleCapturedImage(Mat capturedImage) {}
491 |
492 |
493 | public int getScreenRotationOnPhone() {
494 | final Display display = ((WindowManager) mContext
495 | .getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
496 |
497 | this.lastDetectedRotation = display.getRotation();
498 | switch (this.lastDetectedRotation) {
499 | case Surface.ROTATION_0:
500 | return 90;
501 |
502 | case Surface.ROTATION_90:
503 | return 0;
504 |
505 | case Surface.ROTATION_180:
506 | return 270;
507 |
508 | case Surface.ROTATION_270:
509 | return 180;
510 | }
511 | return 90;
512 | }
513 |
514 | @Override
515 | public void onConfigurationChanged(Configuration newConfig) {
516 | super.onConfigurationChanged(newConfig);
517 | mCamera.setDisplayOrientation(getScreenRotationOnPhone());
518 | }
519 |
  // The preview surface is going away (view detached/hidden): release the
  // camera so other apps can use it.
  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    cleanupCamera();
  }
524 |
  /**
   Processes the image output from the capture session.
   Converts the preview bytes (NV21) into an RGBA Mat and forwards it to
   processOutput, which subclasses override for rectangle detection.
   */
  @Override
  public void onPreviewFrame(byte[] data, Camera camera) {
    try {
      mSurfaceView.setVisibility(SurfaceView.VISIBLE);
      Camera.Size pictureSize = camera.getParameters().getPreviewSize();
      // NV21 layout: full-res Y plane plus half-res interleaved VU -> 1.5x rows.
      Mat yuv = new Mat(new Size(pictureSize.width, pictureSize.height * 1.5), CvType.CV_8UC1);
      yuv.put(0, 0, data);

      Mat mat = new Mat(new Size(pictureSize.width, pictureSize.height), CvType.CV_8UC4);
      Imgproc.cvtColor(yuv, mat, Imgproc.COLOR_YUV2RGBA_NV21, 4);

      yuv.release();

      processOutput(mat);
    } catch(Exception e) {
      // Best effort: dropping a single preview frame beats crashing the view.
      Log.d(TAG, "Error processing preview frame: " + e);
    }
  }
546 |
  // Override hook: receives each preview frame as an RGBA Mat.
  public void processOutput(Mat image) {}
548 |
549 | private void makeShutterSound() {
550 | AudioManager audio = (AudioManager) mActivity.getSystemService(Context.AUDIO_SERVICE);
551 |
552 | if (audio.getRingerMode() == AudioManager.RINGER_MODE_NORMAL) {
553 | MediaActionSound sound = new MediaActionSound();
554 | sound.play(MediaActionSound.SHUTTER_CLICK);
555 | }
556 | }
557 |
558 | private List getResolutionList() {
559 | return mCamera.getParameters().getSupportedPreviewSizes();
560 | }
561 |
562 | private List getPictureResolutionList() {
563 | return mCamera.getParameters().getSupportedPictureSizes();
564 | }
565 |
566 | }
567 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/views/MainView.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.views;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.view.LayoutInflater;
6 | import android.widget.FrameLayout;
7 | import com.facebook.react.bridge.WritableMap;
8 | import com.facebook.react.bridge.WritableNativeMap;
9 | import com.facebook.react.bridge.ReactContext;
10 | import com.facebook.react.uimanager.events.RCTEventEmitter;
11 |
12 | import com.rectanglescanner.R;
13 |
14 | public class MainView extends FrameLayout {
15 | private RNRectangleScannerView view;
16 |
17 | public static MainView instance = null;
18 |
19 | public static MainView getInstance() {
20 | return instance;
21 | }
22 |
23 | public static void createInstance(Context context, Activity activity) {
24 | instance = new MainView(context, activity);
25 | }
26 |
27 | private MainView(Context context, Activity activity) {
28 | super(context);
29 |
30 | LayoutInflater lf = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
31 | FrameLayout frameLayout = (FrameLayout) lf.inflate(R.layout.activity_rectangle_scanner, null);
32 |
33 | view = new RNRectangleScannerView(context, -1, activity, frameLayout);
34 | view.setParent(this);
35 | addViewInLayout(view, 0, new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
36 | addViewInLayout(frameLayout, 1, view.getLayoutParams());
37 | }
38 |
39 | @Override
40 | protected void onLayout(boolean changed, int l, int t, int r, int b) {
41 | for (int i = 0; i < getChildCount(); i++) {
42 | getChildAt(i).layout(l, t, r, b);
43 | }
44 | }
45 |
46 | public void setEnableTorch(boolean enable) {
47 | view.setEnableTorch(enable);
48 | }
49 |
50 | public void setCapturedQuality(double quality) {
51 | view.setCapturedQuality(quality);
52 | }
53 |
54 | public void setFilterId(int filterId) {
55 | view.setFilterId(filterId);
56 | }
57 |
58 | public void startCamera() {
59 | view.startCamera();
60 | }
61 |
62 | public void stopCamera() {
63 | view.stopCamera();
64 | }
65 |
66 | public void cleanupCamera() {
67 | view.cleanupCamera();
68 | }
69 |
70 | public void capture() {
71 | view.capture();
72 | }
73 |
74 | public void focusCamera() {
75 | view.focusCamera();
76 | }
77 |
78 | public void deviceWasSetup(WritableMap config) {
79 | final ReactContext context = (ReactContext) getContext();
80 | context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onDeviceSetup", config);
81 | }
82 |
83 | public void torchWasChanged(boolean torchEnabled) {
84 | WritableMap map = new WritableNativeMap();
85 | map.putBoolean("enabled", torchEnabled);
86 | final ReactContext context = (ReactContext) getContext();
87 | context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onTorchChanged", map);
88 | }
89 |
90 | public void rectangleWasDetected(WritableMap detection) {
91 | final ReactContext context = (ReactContext) getContext();
92 | context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onRectangleDetected", detection);
93 | }
94 |
95 | public void pictureWasTaken(WritableMap pictureDetails) {
96 | final ReactContext context = (ReactContext) getContext();
97 | context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onPictureTaken", pictureDetails);
98 | }
99 |
100 | public void pictureWasProcessed(WritableMap pictureDetails) {
101 | final ReactContext context = (ReactContext) getContext();
102 | context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onPictureProcessed", pictureDetails);
103 | }
104 |
105 | public void pictureDidFailToProcess(WritableMap errorDetails) {
106 | final ReactContext context = (ReactContext) getContext();
107 | context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onErrorProcessingImage", errorDetails);
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/views/RNRectangleScannerView.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.views;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.util.Log;
6 | import android.widget.FrameLayout;
7 |
8 | import com.rectanglescanner.R;
9 | import com.rectanglescanner.helpers.CapturedImage;
10 | import com.facebook.react.bridge.WritableMap;
11 | import com.facebook.react.bridge.WritableNativeMap;
12 |
13 | import org.opencv.core.Core;
14 | import org.opencv.core.CvType;
15 | import org.opencv.core.Mat;
16 | import org.opencv.core.MatOfInt;
17 | import org.opencv.imgcodecs.Imgcodecs;
18 |
19 | import java.util.UUID;
20 | import java.io.File;
21 | import java.util.ArrayList;
22 |
23 | /**
24 | Created by Jake on Jan 6, 2020.
25 |
26 | Wraps up the camera and rectangle detection code into a simple interface.
27 | Allows you to call start, stop, cleanup, and capture. Also is responsible
 for determining how to cache the output images.
29 | */
30 | public class RNRectangleScannerView extends RectangleDetectionController {
31 | private String cacheFolderName = "RNRectangleScanner";
32 | private double capturedQuality = 0.5;
33 |
34 | //================================================================================
35 | // Setup
36 | //================================================================================
37 |
38 | public RNRectangleScannerView(Context context, Integer numCam, Activity activity, FrameLayout frameLayout) {
39 | super(context, numCam, activity, frameLayout);
40 | }
41 |
42 | private MainView parentView = null;
43 |
44 | public void setParent(MainView view) {
45 | this.parentView = view;
46 | }
47 |
48 | /**
49 | Sets the jpeg quality of the output image
50 | */
51 | public void setCapturedQuality(double quality) {
52 | this.capturedQuality = quality;
53 | }
54 |
55 | /**
56 | Call to capture an image
57 | */
58 | public void capture() {
59 | captureImageLater();
60 | }
61 |
62 | /**
63 | Called after a picture was captured
64 | */
65 | private void pictureWasTaken(WritableMap pictureDetails) {
66 | Log.d(TAG, "picture taken");
67 | this.parentView.pictureWasTaken(pictureDetails);
68 | }
69 |
70 | /**
71 | Called after a picture was captured and finished processing
72 | */
73 | private void pictureWasProcessed(WritableMap pictureDetails) {
74 | Log.d(TAG, "picture processed");
75 | this.parentView.pictureWasProcessed(pictureDetails);
76 | }
77 |
78 | /**
79 | Called if the picture faiiled to be captured
80 | */
81 | private void pictureDidFailToProcess(WritableMap errorDetails) {
82 | Log.d(TAG, "picture failed to process");
83 | this.parentView.pictureDidFailToProcess(errorDetails);
84 | }
85 |
86 | /**
87 | Called after the torch/flash state was changed
88 | */
89 | @Override
90 | protected void torchWasChanged(boolean torchEnabled) {
91 | Log.d(TAG, "torch changed");
92 | this.parentView.torchWasChanged(torchEnabled);
93 | }
94 |
95 | /**
96 | Called after the camera and session are set up. This lets you check if a
97 | camera is found and permission is granted to use it.
98 | */
99 | @Override
100 | protected void deviceWasSetup(WritableMap config) {
101 | Log.d(TAG, "device setup");
102 | this.parentView.deviceWasSetup(config);
103 | }
104 |
105 |
106 | /**
107 | Called after a frame is processed and a rectangle was found
108 | */
109 | @Override
110 | public void rectangleWasDetected(WritableMap detection) {
111 | this.parentView.rectangleWasDetected(detection);
112 | }
113 |
114 |
115 | /**
116 | After an image is captured and cropped, this method is called
117 | */
118 | @Override
119 | public void onProcessedCapturedImage(CapturedImage capturedImage) {
120 | WritableMap pictureWasTakenConfig = new WritableNativeMap();
121 | WritableMap pictureWasProcessedConfig = new WritableNativeMap();
122 | String croppedImageFileName = null;
123 | String originalImageFileName = null;
124 | boolean hasCroppedImage = (capturedImage.processed != null);
125 | try {
126 | originalImageFileName = generateStoredFileName("O");
127 | if (hasCroppedImage) {
128 | croppedImageFileName = generateStoredFileName("C");
129 | } else {
130 | croppedImageFileName = originalImageFileName;
131 | }
132 | } catch(Exception e) {
133 | WritableMap folderError = new WritableNativeMap();
134 | folderError.putString("message", "Failed to create the cache directory");
135 | pictureDidFailToProcess(folderError);
136 | return;
137 | }
138 |
139 | pictureWasTakenConfig.putString("croppedImage", "file://" + croppedImageFileName);
140 | pictureWasTakenConfig.putString("initialImage", "file://" + originalImageFileName);
141 | pictureWasProcessedConfig.putString("croppedImage", "file://" + croppedImageFileName);
142 | pictureWasProcessedConfig.putString("initialImage", "file://" + originalImageFileName);
143 | pictureWasTaken(pictureWasTakenConfig);
144 |
145 | if (hasCroppedImage && !this.saveToDirectory(capturedImage.processed, croppedImageFileName)) {
146 | WritableMap fileError = new WritableNativeMap();
147 | fileError.putString("message", "Failed to write cropped image to cache");
148 | fileError.putString("filePath", croppedImageFileName);
149 | pictureDidFailToProcess(fileError);
150 | return;
151 | }
152 | if (!this.saveToDirectory(capturedImage.original, originalImageFileName)) {
153 | WritableMap fileError = new WritableNativeMap();
154 | fileError.putString("message", "Failed to write original image to cache");
155 | fileError.putString("filePath", originalImageFileName);
156 | pictureDidFailToProcess(fileError);
157 | return;
158 | }
159 |
160 | pictureWasProcessed(pictureWasProcessedConfig);
161 | capturedImage.release();
162 | Log.d(TAG, "Captured Images");
163 | }
164 |
165 | private String generateStoredFileName(String name) throws Exception {
166 | String folderDir = this.mContext.getCacheDir().toString();
167 | File folder = new File( folderDir + "/" + this.cacheFolderName);
168 | if (!folder.exists()) {
169 | boolean result = folder.mkdirs();
170 | if (result) {
171 | Log.d(TAG, "wrote: created folder " + folder.getPath());
172 | } else {
173 | Log.d(TAG, "Not possible to create folder");
174 | throw new Exception("Failed to create the cache directory");
175 | }
176 | }
177 | return folderDir + "/" + this.cacheFolderName + "/" + name + UUID.randomUUID() + ".png";
178 | }
179 |
180 | /**
181 | Saves a file to a folder
182 | */
183 | private boolean saveToDirectory(Mat doc, String fileName) {
184 | Mat endDoc = new Mat(doc.size(), CvType.CV_8UC4);
185 | doc.copyTo(endDoc);
186 | Core.flip(doc.t(), endDoc, 1);
187 | ArrayList parameters = new ArrayList();
188 | parameters.add(Imgcodecs.IMWRITE_JPEG_QUALITY);
189 | parameters.add((int)(this.capturedQuality * 100));
190 | MatOfInt par = new MatOfInt();
191 | par.fromList(parameters);
192 | boolean success = Imgcodecs.imwrite(fileName, endDoc, par);
193 |
194 | endDoc.release();
195 |
196 | return success;
197 | }
198 |
199 | }
200 |
--------------------------------------------------------------------------------
/android/src/main/java/com/rectanglescanner/views/RectangleDetectionController.java:
--------------------------------------------------------------------------------
1 | package com.rectanglescanner.views;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.os.Build;
6 | import android.os.HandlerThread;
7 | import android.os.Message;
8 | import android.util.Log;
9 | import android.view.Display;
10 | import android.view.WindowManager;
11 | import android.widget.FrameLayout;
12 |
13 | import com.rectanglescanner.R;
14 | import com.rectanglescanner.helpers.ImageProcessor;
15 | import com.rectanglescanner.helpers.ImageProcessorMessage;
16 | import com.rectanglescanner.helpers.CapturedImage;
17 | import com.facebook.react.bridge.WritableMap;
18 |
19 | import org.opencv.android.OpenCVLoader;
20 | import org.opencv.core.Mat;
21 |
22 | /**
23 | Created by Jake on Jan 6, 2020.
24 |
25 | Takes the output from the camera device controller and attempts to detect
26 | rectangles from the output. On capture, it will also crop the image.
27 | */
28 | public class RectangleDetectionController extends CameraDeviceController {
29 | private HandlerThread mImageThread;
30 | private ImageProcessor mImageProcessor;
31 | private int numberOfRectangles = 15;
32 | private boolean imageProcessorBusy = true;
33 | private int filterId = 1;
34 |
35 | public void setImageProcessorBusy(boolean isBusy) {
36 | this.imageProcessorBusy = isBusy;
37 | }
38 |
39 | public int getFilterId() {
40 | return this.filterId;
41 | }
42 |
43 | /**
44 | Sets the currently active filter
45 | */
46 | public void setFilterId(int filterId) {
47 | this.filterId = filterId;
48 | }
49 |
50 | //================================================================================
51 | // Setup
52 | //================================================================================
53 |
54 | public RectangleDetectionController(Context context, Integer numCam, Activity activity, FrameLayout frameLayout) {
55 | super(context, numCam, activity, frameLayout);
56 | initializeImageProcessor(context);
57 | }
58 |
59 | /**
60 | Sets up the image processor. It uses OpenCV so it needs to load that first
61 | */
62 | private void initializeImageProcessor(Context context) {
63 | mActivity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
64 |
65 | Display display = mActivity.getWindowManager().getDefaultDisplay();
66 | android.graphics.Point size = new android.graphics.Point();
67 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
68 | display.getRealSize(size);
69 | }
70 |
71 | if (OpenCVLoader.initLocal()) {
72 | Log.i(TAG, "OpenCV loaded successfully");
73 | } else {
74 | Log.e(TAG, "OpenCV initialization failed!");
75 | return;
76 | }
77 |
78 | if (mImageThread == null) {
79 | mImageThread = new HandlerThread("Worker Thread");
80 | mImageThread.start();
81 | }
82 |
83 | if (mImageProcessor == null) {
84 | mImageProcessor = new ImageProcessor(mImageThread.getLooper(), this, mContext);
85 | }
86 | this.setImageProcessorBusy(false);
87 | }
88 |
89 | //================================================================================
90 | // Image Detection
91 | //================================================================================
92 |
93 | /**
94 | Runs each frame the image is being pushed to the preview layer
95 | */
96 | @Override
97 | public void processOutput(Mat image) {
98 | detectRectangleFromImageLater(image);
99 | }
100 |
101 | /**
102 | Looks for a rectangle in the given image async
103 | */
104 | private void detectRectangleFromImageLater(Mat image) {
105 | if (!imageProcessorBusy) {
106 | setImageProcessorBusy(true);
107 | Message msg = mImageProcessor.obtainMessage();
108 | msg.obj = new ImageProcessorMessage("previewFrame", image);
109 | mImageProcessor.sendMessageDelayed(msg, 100);
110 | }
111 | }
112 |
113 | /**
114 | Called after a frame is processed and a rectangle was found
115 | */
116 | public void rectangleWasDetected(WritableMap detection) {}
117 |
118 | //================================================================================
119 | // Capture Image
120 | //================================================================================
121 |
122 | /**
123 | After an image is captured, this fuction is called and handles cropping the image
124 | */
125 | @Override
126 | public void handleCapturedImage(Mat capturedImage) {
127 | setImageProcessorBusy(true);
128 | Message msg = mImageProcessor.obtainMessage();
129 | msg.obj = new ImageProcessorMessage("pictureTaken", capturedImage);
130 | mImageProcessor.sendMessageAtFrontOfQueue(msg);
131 | }
132 |
133 | /**
134 | After an image is captured and cropped, this method is called
135 | */
136 | public void onProcessedCapturedImage(CapturedImage scannedDocument) {
137 |
138 | }
139 | }
140 |
--------------------------------------------------------------------------------
/android/src/main/res/layout/activity_rectangle_scanner.xml:
--------------------------------------------------------------------------------
1 |
10 |
21 |
22 |
--------------------------------------------------------------------------------
/example/.gitignore:
--------------------------------------------------------------------------------
1 | # Learn more https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files
2 |
3 | # dependencies
4 | node_modules/
5 |
6 | # Expo
7 | .expo/
8 | dist/
9 | web-build/
10 |
11 | # Native
12 | *.orig.*
13 | *.jks
14 | *.p8
15 | *.p12
16 | *.key
17 | *.mobileprovision
18 |
19 | # Metro
20 | .metro-health-check*
21 |
22 | # debug
23 | npm-debug.*
24 | yarn-debug.*
25 | yarn-error.*
26 |
27 | # macOS
28 | .DS_Store
29 | *.pem
30 |
31 | # local env files
32 | .env*.local
33 |
34 | # typescript
35 | *.tsbuildinfo
--------------------------------------------------------------------------------
/example/App.js:
--------------------------------------------------------------------------------
1 | import { StatusBar } from 'expo-status-bar';
2 | import { StyleSheet, Text, View } from 'react-native';
3 | import ScanDocument from './src/ScanDocument';
4 |
// Example app entry point: renders the document-scanner demo screen.
// NOTE(review): the JSX element tags in this snapshot appear to have been
// stripped during extraction (the imported ScanDocument and StatusBar are not
// visible in the markup below) — verify against the repository before editing.
export default function App() {
  return (

      Open up App.js to start working on your app!


    
  );
}

// Full-screen, centered container for the demo content.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#fff',
    alignItems: 'center',
    justifyContent: 'center',
  },
});
23 |
--------------------------------------------------------------------------------
/example/app.json:
--------------------------------------------------------------------------------
1 | {
2 | "expo": {
3 | "name": "example",
4 | "slug": "example",
5 | "version": "1.0.0",
6 | "orientation": "portrait",
7 | "icon": "./assets/icon.png",
8 | "userInterfaceStyle": "light",
9 | "plugins": [
10 | [
11 | "expo-dev-launcher",
12 | {
13 | "launchMode": "most-recent"
14 | }
15 | ]
16 | ],
17 | "splash": {
18 | "image": "./assets/splash.png",
19 | "resizeMode": "contain",
20 | "backgroundColor": "#ffffff"
21 | },
22 | "ios": {
23 | "supportsTablet": true,
24 | "bundleIdentifier": "com.example.example",
25 | "infoPlist": {
        "NSCameraUsageDescription": "Example App requires access to the camera to take pictures of documents."
27 | }
28 | },
29 | "android": {
30 | "package": "com.example.example",
31 | "adaptiveIcon": {
32 | "foregroundImage": "./assets/adaptive-icon.png",
33 | "backgroundColor": "#ffffff"
34 | }
35 | },
36 | "web": {
37 | "favicon": "./assets/favicon.png"
38 | }
39 | }
40 | }
--------------------------------------------------------------------------------
/example/assets/adaptive-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HarvestProfit/react-native-rectangle-scanner/22fe3cfccf081f17b955d333796dd7d2fc50e8d9/example/assets/adaptive-icon.png
--------------------------------------------------------------------------------
/example/assets/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HarvestProfit/react-native-rectangle-scanner/22fe3cfccf081f17b955d333796dd7d2fc50e8d9/example/assets/favicon.png
--------------------------------------------------------------------------------
/example/assets/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HarvestProfit/react-native-rectangle-scanner/22fe3cfccf081f17b955d333796dd7d2fc50e8d9/example/assets/icon.png
--------------------------------------------------------------------------------
/example/assets/splash.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HarvestProfit/react-native-rectangle-scanner/22fe3cfccf081f17b955d333796dd7d2fc50e8d9/example/assets/splash.png
--------------------------------------------------------------------------------
/example/babel.config.js:
--------------------------------------------------------------------------------
1 | module.exports = function(api) {
2 | api.cache(true);
3 | return {
4 | presets: ['babel-preset-expo'],
5 | };
6 | };
7 |
--------------------------------------------------------------------------------
/example/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "example",
3 | "version": "1.0.0",
4 | "main": "expo/AppEntry.js",
5 | "scripts": {
6 | "start": "expo start --dev-client --clear",
7 | "android": "expo run:android",
8 | "ios": "rm -f ios/.xcode.env.local && expo run:ios"
9 | },
10 | "dependencies": {
11 | "expo": "^50.0.17",
12 | "expo-dev-client": "~3.3.12",
13 | "expo-status-bar": "~1.12.1",
14 | "react": "18.2.0",
15 | "react-native": "0.73.6",
16 | "react-native-rectangle-scanner": "file:../"
17 | },
18 | "devDependencies": {
19 | "@babel/core": "^7.20.0"
20 | },
21 | "private": true
22 | }
--------------------------------------------------------------------------------
/example/src/ScanDocument/CameraControls.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { SafeAreaView, Text, TouchableOpacity, View } from 'react-native';
3 | import { Filters } from 'react-native-rectangle-scanner';
4 |
5 | import { styles } from './styles';
6 |
// Overlay UI for the scanner: filter picker, cancel button, capture button,
// and (when available) a flash toggle.
// NOTE(review): the JSX element tags in this snapshot appear to have been
// stripped during extraction; the expressions below are preserved verbatim.
// FIXME(review): `isCapturing ? () => null : () => capture` returns the
// capture function without invoking it, so pressing the shutter never calls
// capture — should be `capture` or `() => capture()`. Verify in the repo.
const CameraControls = ({ closeScanner, capture, isCapturing, flashIsAvailable, flashOn, setFlashOn, filterId, setFilterId }) => (


 {Filters.RECOMMENDED_PLATFORM_FILTERS.map((f) => (
 setFilterId(f.id)}>
 {f.name}

 ))}






 Cancel



 null : () => capture}
 />



 {flashIsAvailable && (
 setFlashOn(!flashOn)}
 >
 Flash: {flashOn ? 'ON' : 'OFF'}

 )}




);

export default CameraControls;
--------------------------------------------------------------------------------
/example/src/ScanDocument/DocumentScanner.js:
--------------------------------------------------------------------------------
1 | import React, { useRef, useState } from 'react';
2 | import { Animated, ActivityIndicator, Dimensions, Text, TouchableOpacity, View } from 'react-native';
3 | import Scanner, { Filters, FlashAnimation, RectangleOverlay } from 'react-native-rectangle-scanner';
4 |
5 | import { styles } from './styles';
6 | import CameraControls from './CameraControls';
7 |
8 | const JPEGQuality = 0.7;
9 |
10 | const DocumentScanner = ({ closeScanner, onScannedImage }) => {
11 | const [loadingCamera, setLoadingCamera] = useState(true);
12 | const [cameraError, setCameraError] = useState();
13 | const [cameraOn, setCameraOn] = useState(true);
14 | const [flashOn, setFlashOn] = useState(false);
15 | const [filterId, setFilterId] = useState(Filters.PLATFORM_DEFAULT_FILTER_ID);
16 | const [flashIsAvailable, setFlashIsAvailable] = useState(false);
17 | const [processingImage, setProcessingImage] = useState(false);
18 | const [previewSize, setPreviewSize] = useState({});
19 | const [detectedRectangle, setDetectedRectangle] = useState();
20 | // const flashScreenOnCaptureAnimation = useRef(new Animated.Value(0)).current;
21 | const cameraRef = useRef();
22 |
23 | const capture = () => {
24 | if (processingImage) return;
25 | setProcessingImage(true);
26 | cameraRef.current.capture();
27 | // FlashAnimation.triggerSnapAnimation(flashScreenOnCaptureAnimation);
28 | }
29 |
30 | const onPictureProcessed = (event) => {
31 | console.log('cropped, transformed, and added filters to captured image');
32 | onScannedImage(event);
33 | setProcessingImage(false);
34 | }
35 |
36 | const onDeviceSetup = (device) => {
37 | setLoadingCamera(false);
38 | setFlashIsAvailable(device.flashIsAvailable);
39 | if (!device.hasCamera) {
40 | setCameraError('Device does not have a camera');
41 | setCameraOn(false);
42 | } else if (!device.permissionToUseCamera) {
43 | setCameraError('App does not have permission to use the camera');
44 | setCameraOn(false);
45 | }
46 |
47 | const dimensions = Dimensions.get('window');
48 | setPreviewSize({
49 | height: `${device.previewHeightPercent * 100}%`,
50 | width: `${device.previewWidthPercent * 100}%`,
51 | marginTop: (1 - device.previewHeightPercent) * dimensions.height / 2,
52 | marginLeft: (1 - device.previewWidthPercent) * dimensions.width / 2,
53 | });
54 | }
55 |
56 | if (cameraOn) {
57 | return (
58 |
59 | console.log('picture captured...')}
61 | onPictureProcessed={onPictureProcessed}
62 | onErrorProcessingImage={(err) => console.error('Failed to capture scan', err?.message)}
63 | enableTorch={flashOn}
64 | filterId={filterId}
65 | ref={cameraRef}
66 | capturedQuality={JPEGQuality}
67 | onRectangleDetected={(value) => setDetectedRectangle(value.detectedRectangle)}
68 | onDeviceSetup={onDeviceSetup}
69 | onTorchChanged={({ enabled }) => setFlashOn(enabled)}
70 | style={styles.scanner}
71 | />
72 |
73 | {!processingImage && (
74 |
86 | )}
87 |
88 | {/* */}
89 |
90 | {loadingCamera && (
91 |
92 |
93 |
94 | Loading Camera
95 |
96 |
97 | )}
98 |
99 | {processingImage && (
100 |
101 |
102 |
103 |
104 | Processing
105 |
106 |
107 |
108 | )}
109 |
110 |
120 |
121 | );
122 | }
123 |
124 | return (
125 |
126 |
127 |
128 |
129 | Cancel
130 |
131 |
132 |
133 |
134 |
135 |
136 | {cameraError ? cameraError : 'Loading Camera'}
137 |
138 |
139 |
140 | );
141 | }
142 |
143 | export default DocumentScanner;
--------------------------------------------------------------------------------
/example/src/ScanDocument/index.js:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react';
2 | import { Button, Text } from 'react-native';
3 | import DocumentScanner from './DocumentScanner';
4 | import useIsMultiTasking from '../useIsMultiTasking';
5 | import { StatusBar } from 'expo-status-bar';
6 |
7 | const ScanDocument = () => {
8 |
9 | const [scannerIsOn, setScannerIsOn] = useState(false);
10 | const [scannedImage, setScannedImage] = useState();
11 |
12 | const onScannedImage = ({ croppedImage }) => {
13 | console.log('scanned an image!');
14 | setScannedImage(croppedImage);
15 | }
16 |
17 |
18 | const isMultiTasking = useIsMultiTasking();
19 |
20 | if (isMultiTasking) return Not allowed while multi tasking;
21 |
22 | if (!scannerIsOn) {
23 | if (!scannedImage) {
24 | return