├── .gitignore ├── License.md ├── README.md ├── package.json ├── plugin.xml ├── src ├── android │ ├── ImageDetectionPlugin.java │ └── libs │ │ ├── build-extras.gradle │ │ └── opencv-release.aar └── ios │ ├── ImageDetectionPlugin.h │ ├── ImageDetectionPlugin.mm │ ├── ImageUtils.h │ ├── ImageUtils.mm │ ├── libjpeg.a │ └── opencv2.framework.zip └── www └── ImageDetectionPlugin.js /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore MacOS DS_Store files 2 | .DS_Store 3 | 4 | # Ignore framework file since it's bigger than github limit 5 | src/ios/opencv2.framework 6 | -------------------------------------------------------------------------------- /License.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Cloudoki 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![not-maintenace](https://img.shields.io/maintenance/yes/2018) 2 | 3 | No Maintenance Intended 4 | 5 | --- 6 | 7 | # Image Detection Plugin (Android & iOS) 8 | 9 | This plugin allows the application to detect if an inputed image target is visible, or not, by matching the image features with the device camera features using [OpenCV](http://opencv.org/) (v3.1. on Android, v2.4.13 on iOS) It also presents the device camera preview in the background. 10 | 11 | ## cordova-android@7.0.0 12 | 13 | The branch [cordova@7.0.0](https://github.com/Cloudoki/ImageDetectionCordovaPlugin/tree/cordova%407.0.0) should work using this version. 14 | 15 | ## Changes 16 | 17 | - Added setting multiple patterns and loop functionality to detect which is visible 18 | 19 | ### Note (view mode) 20 | 21 | The plugin is aimed to work in **portrait mode**, should also work in landscape but no guarantees. 22 | 23 | ## Install 24 | 25 | To install the plugin in your current Cordova project run outside you project root 26 | 27 | ```shell 28 | git clone https://github.com/Cloudoki/ImageDetectionCordovaPlugin.git 29 | cd 30 | cordova plugin add ../ImageDetectionCordovaPlugin 31 | ``` 32 | 33 | ### Android 34 | 35 | - The plugin aims to be used with Android API >= 16 (4.1 Jelly Bean). 36 | 37 | ### IOS 38 | 39 | - The plugin aims to be used with iOS version >= 7. 40 | - **Important!** Go into src/ios folder and extract opencv2.framework from the zip file into the same folder. 41 | - Since iOS 10, `NSCameraUsageDescription` is required in the project Info.plist of any app that wants to use Camera. 
42 | 43 | The plugin should add this automatically but in case this does not happen to add it, just open the project in XCode, go to the Info tab and add the `NSCameraUsageDescription` key with a string value that explain why your app need an access to the camera. 44 | 45 | ### Note 46 | 47 | In *config.xml* add Android and iOS target preference 48 | 49 | ```javascript 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | ``` 58 | 59 | And don't forget to set the background to be transparent or the preview may not shown up. 60 | Again in *config.xml* add the following preference. 61 | 62 | ```javascript 63 | 64 | ``` 65 | 66 | ## Usage 67 | 68 | The plugin offers the functions `startProcessing`, `setDetectionTimeout`, `isDetecting` and `setPattern`. 69 | 70 | **`startProcessing`** - the plugin will process the video frames captured by the camera if the inputed argument is `true`, if the argument is `false` no frames will be processed. Calls on success if the argument is set and on error if no value set. 71 | 72 | **Note:** the plugins start with this option true. 73 | 74 | ```javascript 75 | startProcessing(true or false, successCallback, errorCallback); 76 | ``` 77 | 78 | **`isDetecting`** - the plugin will callback on success function if detecting the pattern or on error function if it's not. The response will also say what index of the patterns is being detected, the detection rect coordinates and the center/centroid in a JSON object. Just parse it using `JSON.parse()`. 
79 | 80 | ```javascript 81 | isDetecting(successCallback, errorCallback); 82 | ``` 83 | 84 | ```json 85 | // JSON RESPONSE EXAMPLE 86 | { 87 | "message": "pattern detected", 88 | "index": 0, 89 | "coords": { 90 | "1": { "x": 170.839401, "y": 181.510239 }, 91 | "2": { "x": 293.745239, "y": 180.525345 }, 92 | "3": { "x": 301.409363, "y": 352.518280 }, 93 | "4": { "x": 171.494492, "y": 360.808655 } 94 | }, 95 | "center": { 96 | "x": 234.956223, "y": 268.231842 97 | } 98 | } 99 | ``` 100 | 101 | **`setDetectionTimeout`** - this function will set a timeout (**in seconds**) in which the processing of the frames will not occur. Calls on success if the argument is set and on error if no value set. 102 | 103 | ```javascript 104 | setDetectionTimeout(timeout, successCallback, errorCallback); 105 | ``` 106 | 107 | **`setPatterns`** - sets the patterns targets to be detected. Calls on success if the patterns are set and on error if one or more patterns fail to be set. The input patterns must be an array of base64 image string. 
108 | 109 | ```javascript 110 | setPatterns([base64image, ...], successCallback, errorCallback); 111 | ``` 112 | 113 | ## Usage example 114 | 115 | ```javascript 116 | var imgDetectionPlugin = window.plugins.ImageDetectionPlugin || new ImageDetectionPlugin(); 117 | 118 | imgDetectionPlugin.startProcessing(true, function(success){console.log(success);}, function(error){console.log(error);}); 119 | 120 | imgDetectionPlugin.isDetecting(function(success){ 121 | console.log(success); 122 | var resp = JSON.parse(success); 123 | console.log(resp.index, "image detected - ", indexes[resp.index], resp.coords, resp.center); 124 | }, function(error){console.log(error);}); 125 | 126 | function setAllPatterns(patterns) { 127 | imgDetectionPlugin.setPatterns(patterns, function(success){console.log(success);}, function(error){console.log(error);}); 128 | } 129 | 130 | var loadAllImg = 0; 131 | var patternsHolder = []; 132 | var indexes = {}; 133 | var limit = 3; 134 | 135 | function ToDataURL (self) { 136 | var canvas = document.createElement('canvas'); 137 | var ctx = canvas.getContext('2d'); 138 | var dataURL; 139 | canvas.height = self.height; 140 | canvas.width = self.width; 141 | ctx.drawImage(self, 0, 0); 142 | dataURL = canvas.toDataURL("image/jpeg", 0.8); 143 | patternsHolder.push(dataURL); 144 | indexes[loadAllImg] = self.src.substr(self.src.lastIndexOf("/") + 1); 145 | loadAllImg += 1; 146 | console.log("!!!", loadAllImg, indexes); 147 | if(loadAllImg == limit){ 148 | console.log("patterns set", patternsHolder); 149 | setAllPatterns(patternsHolder); 150 | } 151 | canvas = null; 152 | } 153 | 154 | var img = new Image(); 155 | img.crossOrigin = "Anonymous"; 156 | img.onload = function(){ 157 | ToDataURL(this) 158 | }; 159 | img.src = "img/patterns/target1.jpg"; 160 | 161 | var img = new Image(); 162 | img.crossOrigin = "Anonymous"; 163 | img.onload = function(){ 164 | ToDataURL(this) 165 | }; 166 | img.src = "img/patterns/target2.jpg"; 167 | 168 | var img = new Image(); 
169 | img.crossOrigin = "Anonymous"; 170 | img.onload = function(){ 171 | ToDataURL(this) 172 | }; 173 | img.src = "img/patterns/target3.jpg"; 174 | 175 | imgDetectionPlugin.setDetectionTimeout(2, function(success){console.log(success);}, function(error){console.log(error);}); 176 | ``` 177 | 178 | ## Demo Project 179 | 180 | [ImageDetectionDemoApp](https://github.com/a31859/ImageDetectionDemoApp) 181 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "com.cloudoki.imagedetectionplugin", 3 | "version": "0.0.2", 4 | "description": "The ImageDetectionPlugin for Cordova enables the use of OpenCV SDK to detect an inputed image.", 5 | "cordova": { 6 | "id": "com.cloudoki.imagedetectionplugin", 7 | "platforms": [ 8 | "android", 9 | "ios" 10 | ] 11 | }, 12 | "repository": { 13 | "type": "git", 14 | "url": "git+https://github.com/Cloudoki/ImageDetectionCordovaPlugin.git" 15 | }, 16 | "keywords": [ 17 | "OpenCV", 18 | "image detection", 19 | "image matching", 20 | "ecosystem:cordova", 21 | "cordova-android", 22 | "cordova-ios" 23 | ], 24 | "engines": [ 25 | { 26 | "name": "cordova", 27 | "version": ">=3.4.0" 28 | } 29 | ], 30 | "author": "Délio Amaral", 31 | "license": "See License.md for details of the license", 32 | "bugs": { 33 | "url": "https://github.com/Cloudoki/ImageDetectionCordovaPlugin/issues" 34 | }, 35 | "homepage": "https://github.com/Cloudoki/ImageDetectionCordovaPlugin#readme" 36 | } 37 | -------------------------------------------------------------------------------- /plugin.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | Image Detection Plugin 7 | 8 | The ImageDetectionPlugin for Cordova enables the use of OpenCV SDK to detect an inputed image. 
9 | 10 | Délio Amaral 11 | 12 | OpenCV, image detection, image matching 13 | 14 | https://github.com/Cloudoki/ImageDetectionCordovaPlugin 15 | 16 | See License.md for details of the license 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | This application needs camera access to perform image detection. 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /src/android/ImageDetectionPlugin.java: -------------------------------------------------------------------------------- 1 | package com.cloudoki.imagedetectionplugin; 2 | 3 | import android.Manifest; 4 | import android.app.Activity; 5 | import android.app.AlertDialog; 6 | import android.content.DialogInterface; 7 | import android.content.pm.ActivityInfo; 8 | import android.content.pm.PackageManager; 9 | import android.graphics.Bitmap; 10 | import android.graphics.BitmapFactory; 11 | import android.graphics.ImageFormat; 12 | import android.graphics.PixelFormat; 13 | import android.hardware.Camera; 14 | import android.os.Build; 15 | import android.os.Environment; 16 | import android.support.v4.app.ActivityCompat; 17 | import android.support.v4.content.ContextCompat; 18 | import android.util.Base64; 19 | import android.util.Log; 20 | import android.view.Gravity; 21 | import android.view.Surface; 22 | import android.view.SurfaceHolder; 23 | import android.view.SurfaceView; 24 | import android.view.View; 25 | import android.view.ViewGroup; 26 | import android.view.WindowManager; 27 | import android.widget.FrameLayout; 28 | 29 | import org.apache.cordova.CallbackContext; 30 | import org.apache.cordova.CordovaInterface; 31 | import org.apache.cordova.CordovaPlugin; 32 | import 
org.apache.cordova.CordovaWebView; 33 | import org.apache.cordova.PluginResult; 34 | import org.json.JSONArray; 35 | import org.json.JSONException; 36 | import org.opencv.android.BaseLoaderCallback; 37 | import org.opencv.android.CameraBridgeViewBase; 38 | import org.opencv.android.LoaderCallbackInterface; 39 | import org.opencv.android.OpenCVLoader; 40 | import org.opencv.android.Utils; 41 | import org.opencv.calib3d.Calib3d; 42 | import org.opencv.core.Core; 43 | import org.opencv.core.CvType; 44 | import org.opencv.core.Mat; 45 | import org.opencv.core.MatOfByte; 46 | import org.opencv.core.MatOfDMatch; 47 | import org.opencv.core.MatOfKeyPoint; 48 | import org.opencv.core.MatOfPoint2f; 49 | import org.opencv.core.Point; 50 | import org.opencv.core.Scalar; 51 | import org.opencv.core.Size; 52 | import org.opencv.core.DMatch; 53 | import org.opencv.features2d.DescriptorExtractor; 54 | import org.opencv.features2d.DescriptorMatcher; 55 | import org.opencv.features2d.FeatureDetector; 56 | import org.opencv.features2d.Features2d; 57 | import org.opencv.core.KeyPoint; 58 | import org.opencv.imgcodecs.Imgcodecs; 59 | import org.opencv.imgproc.Imgproc; 60 | 61 | import java.util.ArrayList; 62 | import java.util.Date; 63 | import java.util.LinkedList; 64 | import java.util.List; 65 | 66 | public class ImageDetectionPlugin extends CordovaPlugin implements SurfaceHolder.Callback { 67 | 68 | private static final String TAG = "OpenCV::Activity"; 69 | private static final int REQUEST_CAMERA_PERMISSIONS = 133; 70 | private static final int CAMERA_ID_ANY = -1; 71 | private static final int CAMERA_ID_BACK = 99; 72 | private static final int CAMERA_ID_FRONT = 98; 73 | 74 | @SuppressWarnings("deprecation") 75 | private Camera camera; 76 | private Activity activity; 77 | private SurfaceHolder surfaceHolder; 78 | private Mat mYuv; 79 | private Mat desc2; 80 | private FeatureDetector orbDetector; 81 | private DescriptorExtractor orbDescriptor; 82 | private MatOfKeyPoint kp2; 83 | 
private MatOfDMatch matches; 84 | private CallbackContext cb; 85 | private Date last_time; 86 | private boolean processFrames = true, thread_over = true, debug = false, 87 | called_success_detection = false, called_failed_detection = true, 88 | previewing = false, save_files = false; 89 | private List detection = new ArrayList<>(); 90 | 91 | private List triggers = new ArrayList<>(); 92 | private List triggers_kps = new ArrayList<>(); 93 | private List triggers_descs = new ArrayList<>(); 94 | private int trigger_size = -1, detected_index = -1; 95 | 96 | private double timeout = 0.0; 97 | private int cameraId = -1; 98 | private int mCameraIndex = CAMERA_ID_ANY; 99 | 100 | private BaseLoaderCallback mLoaderCallback; 101 | private FrameLayout cameraFrameLayout; 102 | 103 | private int count = 0; 104 | private String[] PERMISSIONS_STORAGE = { 105 | Manifest.permission.READ_EXTERNAL_STORAGE, 106 | Manifest.permission.WRITE_EXTERNAL_STORAGE 107 | }; 108 | 109 | private String coords; 110 | private int screenWidth = 1, screenHeight = 1; 111 | 112 | @SuppressWarnings("deprecation") 113 | private static class JavaCameraSizeAccessor implements CameraBridgeViewBase.ListItemAccessor { 114 | 115 | @Override 116 | public int getWidth(Object obj) { 117 | Camera.Size size = (Camera.Size) obj; 118 | return size.width; 119 | } 120 | 121 | @Override 122 | public int getHeight(Object obj) { 123 | Camera.Size size = (Camera.Size) obj; 124 | return size.height; 125 | } 126 | } 127 | 128 | @Override 129 | public void initialize(CordovaInterface cordova, CordovaWebView webView) { 130 | activity = cordova.getActivity(); 131 | 132 | super.initialize(cordova, webView); 133 | 134 | mLoaderCallback = new BaseLoaderCallback(activity) { 135 | @Override 136 | public void onManagerConnected(int status) { 137 | switch (status) { 138 | case LoaderCallbackInterface.SUCCESS: 139 | { 140 | Log.i(TAG, "OpenCV loaded successfully"); 141 | } break; 142 | default: 143 | { 144 | 
super.onManagerConnected(status); 145 | } break; 146 | } 147 | } 148 | }; 149 | 150 | activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); 151 | activity.getWindow().setFormat(PixelFormat.TRANSLUCENT); 152 | activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); 153 | activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); 154 | 155 | SurfaceView surfaceView = new SurfaceView(activity.getApplicationContext()); 156 | 157 | FrameLayout.LayoutParams params = new FrameLayout.LayoutParams( 158 | ViewGroup.LayoutParams.MATCH_PARENT, 159 | ViewGroup.LayoutParams.MATCH_PARENT, 160 | Gravity.CENTER); 161 | 162 | cameraFrameLayout = new FrameLayout(activity.getApplicationContext()); 163 | 164 | cameraFrameLayout.addView(surfaceView); 165 | 166 | activity.getWindow().addContentView(cameraFrameLayout, params); 167 | 168 | surfaceHolder = surfaceView.getHolder(); 169 | surfaceHolder.addCallback(this); 170 | 171 | sendViewToBack(cameraFrameLayout); 172 | 173 | setCameraIndex(CAMERA_ID_BACK); 174 | openCamera(); 175 | 176 | cameraFrameLayout.setVisibility(View.INVISIBLE); 177 | } 178 | 179 | @Override 180 | public boolean execute(String action, JSONArray data, 181 | CallbackContext callbackContext) throws JSONException { 182 | 183 | if (action.equals("greet")) { 184 | Log.i(TAG, "greet called"); 185 | String name = data.getString(0); 186 | if(name != null && !name.isEmpty()) { 187 | String message = "Hello, " + name; 188 | callbackContext.success(message); 189 | } else { 190 | callbackContext.error(""); 191 | } 192 | return true; 193 | } 194 | if (action.equals("isDetecting")) { 195 | Log.i(TAG, "isDetecting called"); 196 | cb = callbackContext; 197 | return true; 198 | } 199 | if(action.equals("setPatterns")) { 200 | Log.i(TAG, "setPatterns called"); 201 | final JSONArray inputData = data; 202 | final CallbackContext cbContext = callbackContext; 203 | cordova.getThreadPool().execute(new Runnable() { 204 | public 
void run() { 205 | // clear before adding triggers 206 | triggers.clear(); 207 | triggers_kps.clear(); 208 | triggers_descs.clear(); 209 | 210 | String message = "Pattens to be set - " + inputData.length(); 211 | message += "\nBefore set pattern " + triggers.size(); 212 | setBase64Pattern(inputData); 213 | message += "\nAfter set pattern " + triggers.size(); 214 | if(inputData.length() == triggers.size()) { 215 | trigger_size = triggers.size(); 216 | message += "\nPatterns set - " + triggers.size(); 217 | cbContext.success(message); 218 | } else { 219 | message += "\nOne or more patterns failed to be set."; 220 | cbContext.error(message); 221 | } 222 | } 223 | }); 224 | return true; 225 | } 226 | if(action.equals("startProcessing")) { 227 | Log.i(TAG, "startProcessing called"); 228 | String message; 229 | boolean argVal; 230 | try { 231 | argVal = data.getBoolean(0); 232 | screenHeight = data.getInt(1); 233 | screenWidth = data.getInt(2); 234 | } catch (JSONException je) { 235 | argVal = true; 236 | Log.e(TAG, je.getMessage()); 237 | } 238 | if(argVal) { 239 | processFrames = true; 240 | message = "Frame processing set to 'true', screen size is h - " + screenHeight + ", w - " + screenWidth; 241 | callbackContext.success(message); 242 | } else { 243 | processFrames = false; 244 | message = "Frame processing set to 'false', screen size is h - " + screenHeight + ", w - " + screenWidth; 245 | callbackContext.error(message); 246 | } 247 | return true; 248 | } 249 | if(action.equals("setDetectionTimeout")) { 250 | Log.i(TAG, "setDetectionTimeout called"); 251 | String message; 252 | double argVal; 253 | try { 254 | argVal = data.getDouble(0); 255 | } catch (JSONException je) { 256 | argVal = -1; 257 | Log.e(TAG, je.getMessage()); 258 | } 259 | if(argVal >= 0) { 260 | timeout = argVal; 261 | message = "Processing timeout set to " + timeout; 262 | callbackContext.success(message); 263 | } else { 264 | message = "No value or timeout value negative."; 265 | 
callbackContext.error(message); 266 | } 267 | return true; 268 | } 269 | return false; 270 | } 271 | 272 | @Override 273 | public void onStart() 274 | { 275 | super.onStart(); 276 | 277 | Log.i(TAG, "onStart(): Activity starting"); 278 | 279 | if(!checkCameraPermission()) { 280 | ActivityCompat.requestPermissions(activity, 281 | new String[]{Manifest.permission.CAMERA}, 282 | REQUEST_CAMERA_PERMISSIONS); 283 | } 284 | 285 | if(save_files) { 286 | int permission = ActivityCompat.checkSelfPermission(activity, Manifest.permission.WRITE_EXTERNAL_STORAGE); 287 | 288 | if (permission != PackageManager.PERMISSION_GRANTED) { 289 | // We don't have permission so prompt the user 290 | int REQUEST_EXTERNAL_STORAGE = 1; 291 | ActivityCompat.requestPermissions( 292 | activity, 293 | PERMISSIONS_STORAGE, 294 | REQUEST_EXTERNAL_STORAGE 295 | ); 296 | } 297 | } 298 | 299 | thread_over = true; 300 | debug = false; 301 | called_success_detection = false; 302 | called_failed_detection = true; 303 | 304 | last_time = new Date(); 305 | 306 | new android.os.Handler().postDelayed( 307 | new Runnable() { 308 | public void run() { 309 | cameraFrameLayout.setVisibility(View.VISIBLE); 310 | cameraFrameLayout.invalidate(); 311 | } 312 | }, 2000); 313 | } 314 | 315 | public static void sendViewToBack(final View child) { 316 | final ViewGroup parent = (ViewGroup)child.getParent(); 317 | if (null != parent) { 318 | parent.removeView(child); 319 | parent.addView(child, 0); 320 | } 321 | } 322 | 323 | private boolean checkCameraPermission() { 324 | return ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED; 325 | } 326 | 327 | @Override 328 | public void onPause(boolean multitasking) { 329 | super.onPause(multitasking); 330 | } 331 | 332 | @Override 333 | public void onResume(boolean multitasking) { 334 | super.onResume(multitasking); 335 | if (!OpenCVLoader.initDebug()) { 336 | Log.d(TAG, "Internal OpenCV library not found. 
Using OpenCV Manager for initialization"); 337 | OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, activity, mLoaderCallback); 338 | } else { 339 | Log.d(TAG, "OpenCV library found inside package. Using it!"); 340 | mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS); 341 | } 342 | if (camera == null) { 343 | openCamera(); 344 | } 345 | } 346 | 347 | @Override 348 | public void onStop() { 349 | super.onStop(); 350 | } 351 | 352 | @Override 353 | public void onDestroy() { 354 | super.onDestroy(); 355 | } 356 | 357 | @Override 358 | public void surfaceCreated(SurfaceHolder holder) { 359 | matches = new MatOfDMatch(); 360 | orbDetector = FeatureDetector.create(FeatureDetector.ORB); 361 | orbDescriptor = DescriptorExtractor.create(DescriptorExtractor.ORB); 362 | kp2 = new MatOfKeyPoint(); 363 | desc2 = new Mat(); 364 | } 365 | 366 | @Override 367 | public void surfaceChanged(SurfaceHolder holder, int format, int height, int width) { 368 | if(previewing){ 369 | camera.stopPreview(); 370 | previewing = false; 371 | } 372 | 373 | if (camera != null){ 374 | boolean result = initializeCamera(height, width); 375 | if( !result ) { 376 | AlertDialog.Builder builder = new AlertDialog.Builder(activity); 377 | builder.setTitle("An error occurred") 378 | .setMessage("An error occurred while trying to open the camera.") 379 | .setCancelable(false) 380 | .setPositiveButton("Ok", new DialogInterface.OnClickListener() { 381 | public void onClick(DialogInterface dialog, int id) { 382 | activity.finish(); 383 | } 384 | }); 385 | AlertDialog alert = builder.create(); 386 | alert.show(); 387 | } 388 | previewing = true; 389 | } 390 | } 391 | 392 | @Override 393 | public void surfaceDestroyed(SurfaceHolder holder) { 394 | if (camera != null) { 395 | camera.setPreviewCallback(null); 396 | camera.stopPreview(); 397 | camera.release(); 398 | camera = null; 399 | previewing = false; 400 | } 401 | } 402 | 403 | private void setCameraIndex(int index) { 404 | mCameraIndex 
= index; 405 | } 406 | 407 | @SuppressWarnings("deprecation") 408 | private void openCamera() { 409 | camera = null; 410 | 411 | if (mCameraIndex == CAMERA_ID_ANY) { 412 | Log.d(TAG, "Trying to open camera with old open()"); 413 | try { 414 | camera = Camera.open(); 415 | } catch (Exception e) { 416 | Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage()); 417 | } 418 | 419 | if (camera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { 420 | boolean connected = false; 421 | for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { 422 | Log.d(TAG, "Trying to open camera with new open(" + camIdx + ")"); 423 | try { 424 | camera = Camera.open(camIdx); 425 | connected = true; 426 | } catch (RuntimeException e) { 427 | Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage()); 428 | } 429 | if (connected) break; 430 | } 431 | } 432 | } else { 433 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { 434 | int localCameraIndex = mCameraIndex; 435 | if (mCameraIndex == CAMERA_ID_BACK) { 436 | Log.i(TAG, "Trying to open back camera"); 437 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 438 | for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { 439 | Camera.getCameraInfo(camIdx, cameraInfo); 440 | if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { 441 | localCameraIndex = camIdx; 442 | break; 443 | } 444 | } 445 | } else if (mCameraIndex == CAMERA_ID_FRONT) { 446 | Log.i(TAG, "Trying to open front camera"); 447 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 448 | for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { 449 | Camera.getCameraInfo(camIdx, cameraInfo); 450 | if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { 451 | localCameraIndex = camIdx; 452 | break; 453 | } 454 | } 455 | } 456 | if (localCameraIndex == CAMERA_ID_BACK) { 457 | Log.e(TAG, "Back camera not found!"); 458 | } 
else if (localCameraIndex == CAMERA_ID_FRONT) { 459 | Log.e(TAG, "Front camera not found!"); 460 | } else { 461 | Log.d(TAG, "Trying to open camera with new open(" + localCameraIndex + ")"); 462 | try { 463 | camera = Camera.open(localCameraIndex); 464 | } catch (RuntimeException e) { 465 | Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage()); 466 | } 467 | } 468 | cameraId = localCameraIndex; 469 | } 470 | } 471 | } 472 | 473 | @SuppressWarnings("deprecation") 474 | private boolean initializeCamera(int height, int width) { 475 | Log.d(TAG, "Initialize java camera"); 476 | boolean result = true; 477 | synchronized (this) { 478 | if (camera == null) 479 | return false; 480 | 481 | /* Now set camera parameters */ 482 | try { 483 | Camera.Parameters params = camera.getParameters(); 484 | Log.d(TAG, "getSupportedPreviewSizes()"); 485 | List sizes = params.getSupportedPreviewSizes(); 486 | 487 | if (sizes != null) { 488 | /* Select the size that fits surface considering maximum size allowed */ 489 | Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), height, width); 490 | 491 | params.setPreviewFormat(ImageFormat.NV21); 492 | Log.d(TAG, "Set preview size to " + frameSize.width + "x" + frameSize.height); 493 | params.setPreviewSize((int)frameSize.width, (int)frameSize.height); 494 | 495 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100")) 496 | params.setRecordingHint(true); 497 | 498 | List FocusModes = params.getSupportedFocusModes(); 499 | if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) 500 | { 501 | Log.d(TAG, "Set focus mode continuous video " + Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO ); 502 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); 503 | } 504 | else if(FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { 505 | Log.d(TAG, "Set focus 
mode auto " + Camera.Parameters.FOCUS_MODE_AUTO ); 506 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); 507 | } 508 | 509 | if(activity != null) { 510 | Camera.CameraInfo info = new Camera.CameraInfo(); 511 | Camera.getCameraInfo(cameraId, info); 512 | int cameraRotationOffset = info.orientation; 513 | 514 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 515 | int degrees = 0; 516 | switch (rotation) { 517 | case Surface.ROTATION_0: 518 | degrees = 0; 519 | break; // Natural orientation 520 | case Surface.ROTATION_90: 521 | degrees = 90; 522 | break; // Landscape left 523 | case Surface.ROTATION_180: 524 | degrees = 180; 525 | break;// Upside down 526 | case Surface.ROTATION_270: 527 | degrees = 270; 528 | break;// Landscape right 529 | } 530 | int displayRotation; 531 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { 532 | displayRotation = (cameraRotationOffset + degrees) % 360; 533 | displayRotation = (360 - displayRotation) % 360; // compensate the mirror 534 | } else { // back-facing 535 | displayRotation = (cameraRotationOffset - degrees + 360) % 360; 536 | } 537 | 538 | Log.v(TAG, "rotation cam / phone = displayRotation: " + cameraRotationOffset + " / " + degrees + " = " 539 | + displayRotation); 540 | 541 | camera.setDisplayOrientation(displayRotation); 542 | 543 | int rotate; 544 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { 545 | rotate = (360 + cameraRotationOffset + degrees) % 360; 546 | } else { 547 | rotate = (360 + cameraRotationOffset - degrees) % 360; 548 | } 549 | 550 | Log.v(TAG, "screenshot rotation: " + cameraRotationOffset + " / " + degrees + " = " + rotate); 551 | 552 | params.setRotation(rotate); 553 | 554 | params.setPreviewFrameRate(24);// set camera preview 555 | 556 | camera.setParameters(params); 557 | camera.setPreviewDisplay(surfaceHolder); 558 | camera.setPreviewCallback(previewCallback); 559 | } 560 | 561 | /* Finally we are ready to start the preview */ 562 | 
Log.d(TAG, "startPreview"); 563 | camera.startPreview(); 564 | } 565 | else 566 | result = false; 567 | } catch (Exception e) { 568 | result = false; 569 | e.printStackTrace(); 570 | } 571 | } 572 | 573 | return result; 574 | } 575 | 576 | private Size calculateCameraFrameSize(List supportedSizes, CameraBridgeViewBase.ListItemAccessor accessor, int surfaceHeight, int surfaceWidth) { 577 | int calcWidth = 0; 578 | int calcHeight = 0; 579 | 580 | for (Object size : supportedSizes) { 581 | int width = accessor.getWidth(size); 582 | int height = accessor.getHeight(size); 583 | 584 | if (width <= surfaceWidth && height <= surfaceHeight) { 585 | if (width >= calcWidth && height >= calcHeight) { 586 | calcWidth = width; 587 | calcHeight = height; 588 | } 589 | } 590 | } 591 | 592 | return new Size(calcWidth, calcHeight); 593 | } 594 | 595 | @SuppressWarnings("deprecation") 596 | private final Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() { 597 | @Override 598 | public void onPreviewFrame(byte[] data, Camera camera) { 599 | //Log.d(TAG, "ON Preview frame"); 600 | 601 | Date current_time = new Date(); 602 | double time_passed = Math.abs(current_time.getTime() - last_time.getTime())/1000.0; 603 | 604 | boolean hasTriggerSet = false; 605 | if(!triggers.isEmpty()){ 606 | hasTriggerSet = triggers.size() == trigger_size; 607 | } 608 | 609 | if(processFrames && time_passed > timeout && hasTriggerSet) { 610 | if (thread_over) { 611 | thread_over = false; 612 | 613 | if (mYuv != null) mYuv.release(); 614 | Camera.Parameters params = camera.getParameters(); 615 | mYuv = new Mat(params.getPreviewSize().height, params.getPreviewSize().width, CvType.CV_8UC1); 616 | mYuv.put(0, 0, data); 617 | 618 | for (int i = 0; i < triggers.size(); i++) { 619 | Mat pattern = triggers.get(i); 620 | MatOfKeyPoint kp1 = triggers_kps.get(i); 621 | Mat desc1 = triggers_descs.get(i); 622 | processFrame(pattern, kp1, desc1, i); 623 | } 624 | } 625 | //update time and reset timeout 
626 | last_time = current_time; 627 | timeout = 0.0; 628 | } 629 | 630 | } 631 | }; 632 | 633 | private void processFrame(Mat _pattern, MatOfKeyPoint _kp1, Mat _desc1, int _index) { 634 | final Mat pattern = _pattern; 635 | final MatOfKeyPoint kp1 = _kp1; 636 | final Mat desc1 = _desc1; 637 | final int index = _index; 638 | cordova.getThreadPool().execute(new Runnable() { 639 | public void run() { 640 | Mat gray = mYuv.submat(0, mYuv.rows(), 0, mYuv.cols()).t(); 641 | Core.flip(gray, gray, 1); 642 | DescriptorMatcher matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 643 | 644 | if(save_files) { 645 | if (count % 10 == 0) { 646 | String extStorageDirectory = Environment.getExternalStorageDirectory().toString(); 647 | Imgcodecs.imwrite(extStorageDirectory + "/pic" + count + ".png", gray); 648 | Log.i("### FILE ###", "File saved to " + extStorageDirectory + "/pic" + count + ".png"); 649 | } 650 | count++; 651 | } 652 | 653 | //Imgproc.equalizeHist(gray, gray); 654 | 655 | orbDetector.detect(gray, kp2); 656 | orbDescriptor.compute(gray, kp2, desc2); 657 | 658 | if (!desc1.empty() && !desc2.empty()) { 659 | matcherHamming.match(desc1, desc2, matches); 660 | 661 | List matchesList = matches.toList(); 662 | LinkedList good_matches = new LinkedList<>(); 663 | MatOfDMatch gm = new MatOfDMatch(); 664 | 665 | double minDistance = 1000; 666 | 667 | int rowCount; 668 | 669 | if(desc1.rows() < matchesList.size()) 670 | rowCount = desc1.rows(); 671 | else 672 | rowCount = matchesList.size(); 673 | 674 | for (int i = 0; i < rowCount; i++) { 675 | double dist = matchesList.get(i).distance; 676 | if (dist < minDistance) { 677 | minDistance = dist; 678 | } 679 | } 680 | 681 | LinkedList good_matches_reduced = new LinkedList<>(); 682 | MatOfDMatch gmr = new MatOfDMatch(); 683 | double upperBound = 2 * minDistance; 684 | for (int i = 0; i < rowCount; i++) { 685 | if (matchesList.get(i).distance < upperBound && good_matches.size() < 500) { 686 | 
good_matches.addLast(matchesList.get(i)); 687 | if(i < 10 && debug) 688 | { 689 | good_matches_reduced.addLast(matchesList.get(i)); 690 | } 691 | } 692 | } 693 | gm.fromList(good_matches); 694 | if(debug) { 695 | gmr.fromList(good_matches_reduced); 696 | } 697 | 698 | if (good_matches.size() >= 8) { 699 | Mat img_matches = null; 700 | if (debug) { 701 | img_matches = gray.clone(); 702 | Features2d.drawMatches( 703 | pattern, 704 | kp1, 705 | gray, 706 | kp2, 707 | gmr, 708 | img_matches, 709 | new Scalar(255, 0, 0), 710 | new Scalar(0, 0, 255), 711 | new MatOfByte(), 712 | 2); 713 | } 714 | 715 | LinkedList objList = new LinkedList<>(); 716 | LinkedList sceneList = new LinkedList<>(); 717 | 718 | List keypoints_objList = kp1.toList(); 719 | List keypoints_sceneList = kp2.toList(); 720 | 721 | for (int i = 0; i < good_matches.size(); i++) { 722 | objList.addLast(keypoints_objList.get(good_matches.get(i).queryIdx).pt); 723 | sceneList.addLast(keypoints_sceneList.get(good_matches.get(i).trainIdx).pt); 724 | } 725 | 726 | MatOfPoint2f obj = new MatOfPoint2f(); 727 | obj.fromList(objList); 728 | 729 | MatOfPoint2f scene = new MatOfPoint2f(); 730 | scene.fromList(sceneList); 731 | 732 | Mat H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 5); 733 | 734 | boolean result = true; 735 | 736 | double det = 0, N1 = 0, N2 = 0, N3 = 0; 737 | 738 | if (!H.empty()) { 739 | double[] p1 = H.get(0, 0); 740 | double[] p2 = H.get(1, 1); 741 | double[] p3 = H.get(1, 0); 742 | double[] p4 = H.get(0, 1); 743 | double[] p5 = H.get(2, 0); 744 | double[] p6 = H.get(2, 1); 745 | 746 | if (p1 != null && p2 != null && p3 != null && p4 != null) { 747 | det = p1[0] * p2[0] - p3[0] * p4[0]; 748 | if (det < 0) { 749 | result = false; 750 | } 751 | } else { 752 | result = false; 753 | } 754 | 755 | if (p1 != null && p3 != null) { 756 | N1 = Math.sqrt(p1[0] * p1[0] + p3[0] * p3[0]); 757 | if (N1 > 4 || N1 < 0.1) { 758 | result = false; 759 | } 760 | } else { 761 | result = false; 762 | } 763 | 
764 | if (p2 != null && p4 != null) { 765 | N2 = Math.sqrt(p4[0] * p4[0] + p2[0] * p2[0]); 766 | if (N2 > 4 || N2 < 0.1) { 767 | result = false; 768 | } 769 | } else { 770 | result = false; 771 | } 772 | 773 | if (p5 != null && p6 != null) { 774 | N3 = Math.sqrt(p5[0] * p5[0] + p6[0] * p6[0]); 775 | if (N3 > 0.002) { 776 | result = false; 777 | } 778 | } else { 779 | result = false; 780 | } 781 | } else { 782 | result = false; 783 | } 784 | 785 | if (debug) { 786 | Log.i("####### DEBUG #######", det + " " + N1 + " " + N2 + " " + N3); 787 | } 788 | 789 | if (result) { 790 | Log.i("#### DETECTION ####", "Detected stuff"); 791 | updateState(true, index); 792 | Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); 793 | Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); 794 | 795 | obj_corners.put(0, 0, 0, 0); 796 | obj_corners.put(1, 0, pattern.cols(), 0); 797 | obj_corners.put(2, 0, pattern.cols(), pattern.rows()); 798 | obj_corners.put(3, 0, 0, pattern.rows()); 799 | 800 | Core.perspectiveTransform(obj_corners, scene_corners, H); 801 | 802 | // get mat size to match the detected coordinates with the screen size 803 | double width = (double)(gray.cols()); 804 | double height = (double)(gray.rows()); 805 | double scaleX = screenWidth/width; 806 | double scaleY = screenHeight/height; 807 | 808 | double coord1X = scene_corners.get(0, 0)[0] * scaleX; 809 | double coord1Y = scene_corners.get(0, 0)[1] * scaleY; 810 | double coord2X = scene_corners.get(1, 0)[0] * scaleX; 811 | double coord2Y = scene_corners.get(1, 0)[1] * scaleY; 812 | double coord3X = scene_corners.get(2, 0)[0] * scaleX; 813 | double coord3Y = scene_corners.get(2, 0)[1] * scaleY; 814 | double coord4X = scene_corners.get(3, 0)[0] * scaleX; 815 | double coord4Y = scene_corners.get(3, 0)[1] * scaleY; 816 | 817 | // find center of rect based on triangles centroids mean 818 | double centroidTriang1X = (coord1X + coord2X + coord3X)/3; 819 | double centroidTriang1Y = (coord1Y + coord2Y + coord3Y)/3; 820 | 821 | 
double centroidTriang2X = (coord3X + coord4X + coord1X)/3; 822 | double centroidTriang2Y = (coord3Y + coord4Y + coord1Y)/3; 823 | 824 | double centerx = (centroidTriang1X + centroidTriang2X)/2; 825 | double centery = (centroidTriang1Y + centroidTriang2Y)/2; 826 | 827 | coords = "\"coords\": {" + 828 | "\"1\": {\"x\": " + coord1X + ", \"y\": " + coord1Y + "}, " + 829 | "\"2\": {\"x\": " + coord2X + ", \"y\": " + coord2Y + "}, " + 830 | "\"3\": {\"x\": " + coord3X + ", \"y\": " + coord3Y + "}, " + 831 | "\"4\": {\"x\": " + coord4X + ", \"y\": " + coord4Y + "}}, " + 832 | "\"center\": {\"x\": " + centerx + ", \"y\": " + centery + "}"; 833 | 834 | if (debug) { 835 | Imgproc.line(img_matches, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4); 836 | Imgproc.line(img_matches, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4); 837 | Imgproc.line(img_matches, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4); 838 | Imgproc.line(img_matches, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4); 839 | } 840 | } else { 841 | updateState(false, index); 842 | } 843 | H.release(); 844 | } 845 | } 846 | gray.release(); 847 | if(index == (trigger_size - 1)) { 848 | thread_over = true; 849 | } 850 | } 851 | }); 852 | } 853 | 854 | private void setBase64Pattern(JSONArray dataArray) { 855 | detection.clear(); 856 | for (int i = 0; i < dataArray.length(); i++) { 857 | try { 858 | detection.add(0); 859 | String image_base64 = dataArray.getString(i); 860 | if(image_base64 != null && !image_base64.isEmpty()) { 861 | Mat image_pattern = new Mat(); 862 | MatOfKeyPoint kp1 = new MatOfKeyPoint(); 863 | Mat desc1 = new Mat(); 864 | 865 | int limit = 400; 866 | if(image_base64.contains("data:")) 867 | image_base64 = image_base64.split(",")[1]; 868 | byte[] decodedString = Base64.decode(image_base64, 
Base64.DEFAULT); 869 | Bitmap bitmap = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length); 870 | Bitmap scaled = bitmap; 871 | if (bitmap.getWidth() > limit) { 872 | double scale = bitmap.getWidth() / limit; 873 | scaled = Bitmap.createScaledBitmap(bitmap, (int) (bitmap.getWidth()/scale), (int) (bitmap.getHeight()/scale), true); 874 | if (bitmap.getHeight() > limit) { 875 | scale = bitmap.getHeight() / limit; 876 | scaled = Bitmap.createScaledBitmap(bitmap, (int) (bitmap.getWidth()/scale), (int) (bitmap.getHeight()/scale), true); 877 | } 878 | } 879 | Utils.bitmapToMat(scaled, image_pattern); 880 | Imgproc.cvtColor(image_pattern, image_pattern, Imgproc.COLOR_BGR2GRAY); 881 | //Imgproc.equalizeHist(image_pattern, image_pattern); 882 | 883 | if(save_files) { 884 | Utils.matToBitmap(image_pattern, scaled); 885 | String extStorageDirectory = Environment.getExternalStorageDirectory().toString(); 886 | int num = (int) (Math.random() * 10001); 887 | Imgcodecs.imwrite(extStorageDirectory + "/pic" + num + ".png", image_pattern); 888 | Log.i("### FILE ###", "File saved to " + extStorageDirectory + "/pic" + num + ".png"); 889 | } 890 | 891 | orbDetector.detect(image_pattern, kp1); 892 | orbDescriptor.compute(image_pattern, kp1, desc1); 893 | 894 | triggers.add(image_pattern); 895 | triggers_kps.add(kp1); 896 | triggers_descs.add(desc1); 897 | } 898 | } catch (JSONException e) { 899 | // do nothing 900 | } 901 | } 902 | } 903 | 904 | private void updateState(boolean state, int _index) { 905 | final int index = _index; 906 | 907 | int detection_limit = 6; 908 | 909 | if(state) { 910 | try { 911 | int result = detection.get(_index) + 1; 912 | if(result < detection_limit) { 913 | detection.set(_index, result); 914 | } 915 | } catch (IndexOutOfBoundsException ibe){ 916 | // detection.add(_index, 1); 917 | } 918 | } else { 919 | for (int i = 0; i < triggers.size(); i++) { 920 | try { 921 | int result = detection.get(i) - 1; 922 | if(result < 0) { 923 | 
detection.set(_index, 0); 924 | } else { 925 | detection.set(_index, result); 926 | } 927 | } catch (IndexOutOfBoundsException ibe){ 928 | // detection.add(i, 0); 929 | } 930 | } 931 | } 932 | 933 | if (getState(_index) && called_failed_detection && !called_success_detection) { 934 | cordova.getThreadPool().execute(new Runnable() { 935 | public void run() { 936 | PluginResult result = new PluginResult(PluginResult.Status.OK, "{\"message\":\"pattern detected\", \"index\":" + index + ", " + coords + "}"); 937 | result.setKeepCallback(true); 938 | cb.sendPluginResult(result); 939 | } 940 | }); 941 | called_success_detection = true; 942 | called_failed_detection = false; 943 | detected_index = _index; 944 | } 945 | 946 | boolean valid_index = detected_index == _index; 947 | 948 | if (!getState(_index) && !called_failed_detection && called_success_detection && valid_index) { 949 | cordova.getThreadPool().execute(new Runnable() { 950 | public void run() { 951 | PluginResult result = new PluginResult(PluginResult.Status.ERROR, "{\"message\":\"pattern not detected\"}"); 952 | result.setKeepCallback(true); 953 | cb.sendPluginResult(result); 954 | } 955 | }); 956 | called_success_detection = false; 957 | called_failed_detection = true; 958 | } 959 | } 960 | 961 | private boolean getState(int index) { 962 | int total; 963 | int detection_thresh = 3; 964 | 965 | total = detection.get(index); 966 | 967 | if(debug) { 968 | Log.i("## GET STATE RESULT ##", " state -> " + total); 969 | } 970 | 971 | return total >= detection_thresh; 972 | } 973 | } 974 | -------------------------------------------------------------------------------- /src/android/libs/build-extras.gradle: -------------------------------------------------------------------------------- 1 | dependencies { 2 | compile (name: 'opencv-release', ext: 'aar') 3 | } 4 | repositories { 5 | flatDir{ 6 | dirs 'libs' 7 | } 8 | } 9 | 10 | ext.postBuildExtras = { 11 | android { 12 | compileOptions { 13 | sourceCompatibility 
JavaVersion.VERSION_1_7 14 | targetCompatibility JavaVersion.VERSION_1_7 15 | } 16 | allprojects { 17 | compileOptions { 18 | sourceCompatibility = JavaVersion.VERSION_1_7 19 | targetCompatibility = JavaVersion.VERSION_1_7 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/android/libs/opencv-release.aar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyrexEnterprise/ImageDetectionCordovaPlugin/936f34101c523449a6d77b055653c3e0ee9ad36a/src/android/libs/opencv-release.aar -------------------------------------------------------------------------------- /src/ios/ImageDetectionPlugin.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | #import 5 | 6 | @interface ImageDetectionPlugin : CDVPlugin 7 | { 8 | UIImageView *img; 9 | CvVideoCamera *camera; 10 | } 11 | 12 | @property (nonatomic, retain) CvVideoCamera *camera; 13 | @property (nonatomic, retain) UIImageView *img; 14 | 15 | - (void)greet:(CDVInvokedUrlCommand*)command; 16 | 17 | - (void)isDetecting:(CDVInvokedUrlCommand*)command; 18 | 19 | - (void)setPatterns:(CDVInvokedUrlCommand*)command; 20 | 21 | - (void)startProcessing:(CDVInvokedUrlCommand*)command; 22 | 23 | - (void)setDetectionTimeout:(CDVInvokedUrlCommand*)command; 24 | 25 | @end 26 | -------------------------------------------------------------------------------- /src/ios/ImageDetectionPlugin.mm: -------------------------------------------------------------------------------- 1 | #import "ImageDetectionPlugin.h" 2 | #import "ImageUtils.h" 3 | #import 4 | #import 5 | 6 | using namespace cv; 7 | 8 | @interface ImageDetectionPlugin() 9 | { 10 | std::vector triggers, triggers_descs; 11 | std::vector< std::vector > triggers_kps; 12 | bool processFrames, debug, save_files, thread_over, called_success_detection, called_failed_detection; 13 | int detected_index; 14 | 
NSMutableArray *detection; 15 | NSString *callbackID, *coords; 16 | NSDate *last_time, *ease_last_time, *timeout_started; 17 | float timeout, full_timeout, ease_time; 18 | NSUInteger triggers_size; 19 | double screenWidth, screenHeight; 20 | } 21 | 22 | @end 23 | 24 | @implementation ImageDetectionPlugin 25 | 26 | @synthesize camera, img; 27 | 28 | - (void)greet:(CDVInvokedUrlCommand*)command 29 | { 30 | [self.commandDelegate runInBackground:^{ 31 | CDVPluginResult* plugin_result = nil; 32 | NSString* name = [command.arguments objectAtIndex:0]; 33 | NSString* msg = [NSString stringWithFormat: @"Hello, %@", name]; 34 | 35 | if (name != nil && [name length] > 0) { 36 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:msg]; 37 | } else { 38 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR]; 39 | } 40 | 41 | [self.commandDelegate sendPluginResult:plugin_result callbackId:command.callbackId]; 42 | }]; 43 | } 44 | 45 | -(void)isDetecting:(CDVInvokedUrlCommand*)command 46 | { 47 | callbackID = command.callbackId; 48 | } 49 | 50 | - (void)setPatterns:(CDVInvokedUrlCommand*)command; 51 | { 52 | [self.commandDelegate runInBackground:^{ 53 | CDVPluginResult* plugin_result = nil; 54 | NSMutableString* msg = [NSMutableString stringWithString:@""]; 55 | NSArray* patterns = [[NSArray alloc] init]; 56 | patterns = command.arguments; 57 | 58 | if (patterns != nil && [patterns count] > 0) { 59 | triggers_size = [patterns count]; 60 | triggers.clear(); 61 | triggers_kps.clear(); 62 | triggers_descs.clear(); 63 | [msg appendFormat:@"Patterns to be set - %lu", (unsigned long)[patterns count]]; 64 | int triggers_length = 0; 65 | if(!triggers.empty()){ 66 | triggers_length = (int)triggers.size(); 67 | } 68 | [msg appendFormat:@"\nBefore set pattern - %d", triggers_length]; 69 | [self setBase64Pattern: patterns]; 70 | if(!triggers.empty()){ 71 | triggers_length = (int)triggers.size(); 72 | } 73 | [msg appendFormat:@"\nAfter 
set pattern - %d", triggers_length]; 74 | if((int) triggers.size() == triggers_size){ 75 | [msg appendFormat:@"\nPatterns set - %d", triggers_length]; 76 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:msg]; 77 | } else { 78 | [msg appendString:@"\nOne or more patterns failed to be set."]; 79 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR messageAsString:msg]; 80 | } 81 | } else { 82 | [msg appendString:@"At least one pattern must be set."]; 83 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR messageAsString:msg]; 84 | } 85 | 86 | [self.commandDelegate sendPluginResult:plugin_result callbackId:command.callbackId]; 87 | }]; 88 | } 89 | 90 | - (void)startProcessing:(CDVInvokedUrlCommand*)command; 91 | { 92 | [self.commandDelegate runInBackground:^{ 93 | CDVPluginResult* plugin_result = nil; 94 | NSNumber* argVal = [command.arguments objectAtIndex:0]; 95 | NSNumber* sHeight = [command.arguments objectAtIndex:1]; 96 | NSNumber* sWidth = [command.arguments objectAtIndex:2]; 97 | NSString* msg; 98 | 99 | if (argVal != nil) { 100 | BOOL argValBool; 101 | @try { 102 | argValBool = [argVal boolValue]; 103 | screenWidth = [sWidth doubleValue]; 104 | screenHeight = [sHeight doubleValue]; 105 | } 106 | @catch (NSException *exception) { 107 | argValBool = YES; 108 | self->screenWidth = 1.0; 109 | self->screenHeight = 1.0; 110 | NSLog(@"%@", exception.reason); 111 | } 112 | if (argValBool == YES) { 113 | processFrames = true; 114 | msg = @"Frame processing set to 'true'"; 115 | } else { 116 | processFrames = false; 117 | msg = @"Frame processing set to 'false'"; 118 | } 119 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:msg]; 120 | } else { 121 | msg = @"No value"; 122 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR messageAsString:msg]; 123 | } 124 | 125 | [self.commandDelegate sendPluginResult:plugin_result 
callbackId:command.callbackId]; 126 | }]; 127 | } 128 | 129 | - (void)setDetectionTimeout:(CDVInvokedUrlCommand*)command; 130 | { 131 | [self.commandDelegate runInBackground:^{ 132 | CDVPluginResult* plugin_result = nil; 133 | NSNumber* argVal = [command.arguments objectAtIndex:0]; 134 | NSString* msg; 135 | 136 | if (argVal != nil && argVal > (void *) 0) { 137 | timeout = [argVal floatValue]; 138 | ease_time = 0.5; 139 | timeout_started = [NSDate date]; 140 | msg = [NSString stringWithFormat:@"Processing timeout set to %@", argVal]; 141 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:msg]; 142 | } else { 143 | msg = @"No value or timeout value negative."; 144 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR messageAsString:msg]; 145 | } 146 | 147 | [self.commandDelegate sendPluginResult:plugin_result callbackId:command.callbackId]; 148 | }]; 149 | } 150 | 151 | -(void)setBase64Pattern:(NSArray *)patterns 152 | { 153 | ORB orb = ORB::ORB(); 154 | 155 | for (int i = 0; i < [patterns count]; i++) { 156 | [detection insertObject:[NSNumber numberWithInt:0] atIndex:i]; 157 | NSString *image_base64 = [patterns objectAtIndex:i]; 158 | 159 | if ([image_base64 rangeOfString:@"data:"].location == NSNotFound) { 160 | // do nothing 161 | } else { 162 | NSArray *lines = [image_base64 componentsSeparatedByString: @","]; 163 | image_base64 = lines[1]; 164 | } 165 | 166 | int width_limit = 400, height_limit = 400; 167 | 168 | UIImage *image = [ImageUtils decodeBase64ToImage: image_base64]; 169 | UIImage *scaled = image; 170 | 171 | // scale image to improve detection 172 | //NSLog(@"SCALE BEFORE %f", (scaled.size.width)); 173 | if(image.size.width > width_limit) { 174 | scaled = [UIImage imageWithCGImage:[image CGImage] scale:(image.size.width/width_limit) orientation:(image.imageOrientation)]; 175 | if(scaled.size.height > height_limit) { 176 | scaled = [UIImage imageWithCGImage:[scaled CGImage] 
scale:(scaled.size.height/height_limit) orientation:(scaled.imageOrientation)]; 177 | } 178 | } 179 | //NSLog(@"SCALE AFTER %f", (scaled.size.width)); 180 | 181 | Mat patt, desc1; 182 | std::vector kp1; 183 | 184 | patt = [ImageUtils cvMatFromUIImage: scaled]; 185 | 186 | patt = [ImageUtils cvMatFromUIImage: scaled]; 187 | cvtColor(patt, patt, CV_BGRA2GRAY); 188 | //equalizeHist(patt, patt); 189 | 190 | //save mat as image 191 | if (save_files) 192 | { 193 | UIImageWriteToSavedPhotosAlbum([ImageUtils UIImageFromCVMat:patt], nil, nil, nil); 194 | } 195 | orb.detect(patt, kp1); 196 | orb.compute(patt, kp1, desc1); 197 | 198 | triggers.push_back(patt); 199 | triggers_kps.push_back(kp1); 200 | triggers_descs.push_back(desc1); 201 | } 202 | } 203 | 204 | - (void)pluginInitialize { 205 | // set orientation portraint 206 | NSNumber *value = [NSNumber numberWithInt:UIInterfaceOrientationPortrait]; 207 | [[UIDevice currentDevice] setValue:value forKey:@"orientation"]; 208 | 209 | // set webview and it's subviews to transparent 210 | for (UIView *subview in [self.webView subviews]) { 211 | [subview setOpaque:NO]; 212 | [subview setBackgroundColor:[UIColor clearColor]]; 213 | } 214 | [self.webView setBackgroundColor:[UIColor clearColor]]; 215 | [self.webView setOpaque: NO]; 216 | // setup view to render the camera capture 217 | CGRect screenRect = [[UIScreen mainScreen] bounds]; 218 | img = [[UIImageView alloc] initWithFrame: screenRect]; 219 | img.contentMode = UIViewContentModeScaleAspectFill; 220 | [self.webView.superview addSubview: img]; 221 | // set views order 222 | [self.webView.superview bringSubviewToFront: self.webView]; 223 | 224 | //Camera 225 | self.camera = [[CvVideoCamera alloc] initWithParentView: img]; 226 | self.camera.useAVCaptureVideoPreviewLayer = YES; 227 | self.camera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack; 228 | self.camera.defaultAVCaptureSessionPreset = AVCaptureSessionPresetMedium; 229 | 
self.camera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait; 230 | self.camera.defaultFPS = 30; 231 | self.camera.grayscaleMode = NO; 232 | 233 | self.camera.delegate = self; 234 | 235 | processFrames = true; 236 | debug = false; 237 | save_files = false; 238 | thread_over = true; 239 | called_success_detection = false; 240 | called_failed_detection = true; 241 | 242 | timeout = 0.0; 243 | full_timeout = 6.0; 244 | ease_time = 0.0; 245 | last_time = [NSDate date]; 246 | timeout_started = last_time; 247 | ease_last_time = last_time; 248 | 249 | detection = [[NSMutableArray alloc] init]; 250 | triggers_size = -1; 251 | detected_index = -1; 252 | 253 | [self.camera start]; 254 | NSLog(@"----------- CAMERA STARTED ----------"); 255 | NSLog(@"----------- CV_VERSION %s ----------", CV_VERSION); 256 | } 257 | 258 | #pragma mark - Protocol CvVideoCameraDelegate 259 | #ifdef __cplusplus 260 | - (void)processImage:(Mat &)image; 261 | { 262 | //get current time and calculate time passed since last time update 263 | NSDate *current_time = [NSDate date]; 264 | NSTimeInterval time_passed = [current_time timeIntervalSinceDate:last_time]; 265 | NSTimeInterval time_diff_passed = [current_time timeIntervalSinceDate:timeout_started]; 266 | NSTimeInterval passed_ease = [current_time timeIntervalSinceDate:ease_last_time]; 267 | 268 | //NSLog(@"time passed %f, time full %f, passed ease %f", time_passed, time_diff_passed, passed_ease); 269 | 270 | //process frames if option is true and timeout passed 271 | BOOL hasTriggerSet = false; 272 | if(!triggers.empty()){ 273 | hasTriggerSet = triggers.size() == triggers_size; 274 | } 275 | if (processFrames && time_passed > timeout && hasTriggerSet) { 276 | //check if time passed full timout time 277 | if(time_diff_passed > full_timeout) { 278 | ease_time = 0.0; 279 | } 280 | // ease detection after timeout 281 | if (passed_ease > ease_time) { 282 | // process each image in new thread 283 | if(!image.empty() && thread_over){ 
284 | for (int i = 0; i < triggers.size(); i++) { 285 | Mat patt = triggers.at(i); 286 | std::vector kp1 = triggers_kps.at(i); 287 | Mat desc1 = triggers_descs.at(i); 288 | thread_over = false; 289 | Mat image_copy = image.clone(); 290 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ 291 | [self backgroundImageProcessing: image_copy pattern:patt keypoints:kp1 descriptor:desc1 index:i]; 292 | dispatch_sync(dispatch_get_main_queue(), ^{ 293 | if(i == (triggers.size() - 1)) { 294 | thread_over = true; 295 | } 296 | }); 297 | }); 298 | } 299 | } 300 | ease_last_time = current_time; 301 | } 302 | 303 | //update time and reset timeout 304 | last_time = current_time; 305 | timeout = 0.0; 306 | } 307 | } 308 | #endif 309 | 310 | #ifdef __cplusplus 311 | - (void)backgroundImageProcessing:(const Mat &)image pattern:(const Mat &)patt keypoints:(const std::vector &)kp1 descriptor:(const Mat &)desc1 index:(const int &)idx 312 | { 313 | if(!image.empty() && !patt.empty()) 314 | { 315 | Mat gray = image; 316 | //Mat image_copy = image; 317 | Mat desc2; 318 | std::vector kp2; 319 | 320 | cvtColor(image, gray, CV_BGRA2GRAY); 321 | //equalizeHist(gray, gray); 322 | 323 | ORB orb = ORB::ORB(); 324 | orb.detect(gray, kp2); 325 | orb.compute(gray, kp2, desc2); 326 | 327 | BFMatcher bf = BFMatcher::BFMatcher(NORM_HAMMING2, true); 328 | std::vector matches; 329 | std::vector good_matches; 330 | 331 | if(!desc1.empty() && !desc2.empty()) 332 | { 333 | bf.match(desc1, desc2, matches); 334 | 335 | int size = 0; 336 | double min_dist = 1000; 337 | if(desc1.rows < matches.size()) 338 | size = desc1.rows; 339 | else 340 | size = (int)matches.size(); 341 | 342 | for(int i = 0; i < size; i++) 343 | { 344 | double dist = matches[i].distance; 345 | if(dist < min_dist) 346 | { 347 | min_dist = dist; 348 | } 349 | } 350 | 351 | std::vector good_matches_reduced; 352 | 353 | for(int i = 0; i < size; i++) 354 | { 355 | if(matches[i].distance <= 2 * min_dist && 
good_matches.size() < 500) 356 | { 357 | good_matches.push_back(matches[i]); 358 | if(i < 10 && debug) 359 | { 360 | good_matches_reduced.push_back(matches[i]); 361 | } 362 | } 363 | } 364 | 365 | if(good_matches.size() >= 8) 366 | { 367 | if(debug) 368 | { 369 | Mat imageMatches; 370 | drawMatches(patt, kp1, gray, kp2, good_matches_reduced, imageMatches, Scalar::all(-1), Scalar::all(-1), std::vector(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS); 371 | //image_copy = imageMatches; 372 | } 373 | 374 | Mat img_matches = image; 375 | //-- Localize the object 376 | std::vector obj; 377 | std::vector scene; 378 | 379 | for( int i = 0; i < good_matches.size(); i++ ) 380 | { 381 | //-- Get the keypoints from the good matches 382 | obj.push_back( kp1[ good_matches[i].queryIdx ].pt ); 383 | scene.push_back( kp2[ good_matches[i].trainIdx ].pt ); 384 | } 385 | 386 | Mat H = findHomography( obj, scene, CV_RANSAC); 387 | 388 | bool result = true; 389 | 390 | if (!H.empty()) { 391 | const double p1 = H.at(0, 0); 392 | const double p2 = H.at(1, 1); 393 | const double p3 = H.at(1, 0); 394 | const double p4 = H.at(0, 1); 395 | const double p5 = H.at(2, 0); 396 | const double p6 = H.at(2, 1); 397 | double det = 0, N1 = 0, N2 = 0, N3 = 0; 398 | 399 | if (p1 && p2 && p3 && p4) { 400 | det = p1 * p2 - p3 * p4; 401 | if (det < 0) 402 | result = false; 403 | } else { 404 | result = false; 405 | } 406 | 407 | if (p1 && p3) { 408 | N1 = sqrt(p1 * p1 + p3 * p3); 409 | if (N1 > 4 || N1 < 0.1) 410 | result = false; 411 | } else { 412 | result = false; 413 | } 414 | 415 | if (p2 && p4) { 416 | N2 = sqrt(p4 * p4 + p2 * p2); 417 | if (N2 > 4 || N2 < 0.1) 418 | result = false; 419 | } else { 420 | result = false; 421 | } 422 | 423 | if (p5 && p6) { 424 | N3 = sqrt(p5 * p5 + p6 * p6); 425 | if (N3 > 0.002) 426 | result = false; 427 | } else { 428 | result = false; 429 | } 430 | 431 | //NSLog(@"det %f, N1 %f, N2 %f, N3 %f, result %i", det, N1, N2, N3, result); 432 | } else { 433 | result = false; 
434 | } 435 | 436 | if(result) 437 | { 438 | NSLog(@"detecting for index - %d", (int)idx); 439 | [self updateState: true index:(int)idx]; 440 | if(save_files) 441 | { 442 | UIImageWriteToSavedPhotosAlbum([ImageUtils UIImageFromCVMat:gray], nil, nil, nil); 443 | } 444 | 445 | //-- Get the corners from the image_1 ( the object to be "detected" ) 446 | std::vector obj_corners(4); 447 | obj_corners[0] = cvPoint(0,0); obj_corners[1] = cvPoint( patt.cols, 0 ); 448 | obj_corners[2] = cvPoint( patt.cols, patt.rows ); obj_corners[3] = cvPoint( 0, patt.rows ); 449 | std::vector scene_corners(4); 450 | 451 | perspectiveTransform( obj_corners, scene_corners, H); 452 | 453 | // get mat size to match the detected coordinates with the screen size 454 | double width = gray.cols; 455 | double height = gray.rows; 456 | double scaleX = screenWidth/width; 457 | double scaleY = screenHeight/height; 458 | 459 | double coord1X = scene_corners[0].x * scaleX; 460 | double coord1Y = scene_corners[0].y * scaleY; 461 | double coord2X = scene_corners[1].x * scaleX; 462 | double coord2Y = scene_corners[1].y * scaleY; 463 | double coord3X = scene_corners[2].x * scaleX; 464 | double coord3Y = scene_corners[2].y * scaleY; 465 | double coord4X = scene_corners[3].x * scaleX; 466 | double coord4Y = scene_corners[3].y * scaleY; 467 | 468 | // find center of rect based on triangles centroids mean 469 | double centroidTriang1X = (coord1X + coord2X + coord3X)/3; 470 | double centroidTriang1Y = (coord1Y + coord2Y + coord3Y)/3; 471 | 472 | double centroidTriang2X = (coord3X + coord4X + coord1X)/3; 473 | double centroidTriang2Y = (coord3Y + coord4Y + coord1Y)/3; 474 | 475 | double centerx = (centroidTriang1X + centroidTriang2X)/2; 476 | double centery = (centroidTriang1Y + centroidTriang2Y)/2; 477 | 478 | coords = [NSString stringWithFormat:@"\"coords\": {\"1\": {\"x\": %f, \"y\": %f}, \"2\": {\"x\": %f, \"y\": %f}, \"3\": {\"x\": %f, \"y\": %f}, \"4\": {\"x\": %f, \"y\": %f}}, \"center\": {\"x\": %f, 
\"y\": %f}", coord1X, coord1Y, coord2X, coord2Y, coord3X, coord3Y, coord4X, coord4Y, centerx, centery]; 479 | 480 | if(debug) 481 | { 482 | 483 | //-- Draw lines between the corners (the mapped object in the scene - image_2 ) 484 | line( img_matches, scene_corners[0] + Point2f( patt.cols, 0), scene_corners[1] + Point2f( patt.cols, 0), Scalar(0, 255, 0), 4 ); 485 | line( img_matches, scene_corners[1] + Point2f( patt.cols, 0), scene_corners[2] + Point2f( patt.cols, 0), Scalar( 0, 255, 0), 4 ); 486 | line( img_matches, scene_corners[2] + Point2f( patt.cols, 0), scene_corners[3] + Point2f( patt.cols, 0), Scalar( 0, 255, 0), 4 ); 487 | line( img_matches, scene_corners[3] + Point2f( patt.cols, 0), scene_corners[0] + Point2f( patt.cols, 0), Scalar( 0, 255, 0), 4 ); 488 | 489 | //image_copy = img_matches; 490 | } 491 | } else { 492 | [self updateState: false index:(int)idx]; 493 | } 494 | H.release(); 495 | img_matches.release(); 496 | } 497 | matches.clear(); 498 | good_matches.clear(); 499 | good_matches_reduced.clear(); 500 | } 501 | gray.release(); 502 | desc2.release(); 503 | kp2.clear(); 504 | //image = image_copy; 505 | } 506 | } 507 | #endif 508 | 509 | -(void)updateState:(BOOL) state index:(const int &)idx 510 | { 511 | int detection_limit = 6; 512 | // 513 | // if(detection.count > detection_limit) 514 | // { 515 | // [detection removeObjectAtIndex:0]; 516 | // } 517 | 518 | NSLog(@"updating state for index - %d", (int)idx); 519 | 520 | if(state) 521 | { 522 | int result = [[detection objectAtIndex:(int)idx] intValue] + 1; 523 | if(result < detection_limit) { 524 | [detection replaceObjectAtIndex:idx withObject:[NSNumber numberWithInt:result]]; 525 | } 526 | } else { 527 | for (int i = 0; i < triggers.size(); i++) { 528 | int result = [[detection objectAtIndex:(int)i] intValue] - 1; 529 | if(result < 0) { 530 | result = 0; 531 | } 532 | [detection replaceObjectAtIndex:idx withObject:[NSNumber numberWithInt:result]]; 533 | } 534 | } 535 | 536 | if([self 
getState:(int)idx] && called_failed_detection && !called_success_detection) { 537 | [self.commandDelegate runInBackground:^{ 538 | CDVPluginResult* plugin_result = nil; 539 | NSString* msg = [NSString stringWithFormat:@"{\"message\":\"pattern detected\", \"index\":%d, %@}", (int)idx, coords]; 540 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsString:msg]; 541 | [plugin_result setKeepCallbackAsBool:YES]; 542 | 543 | [self.commandDelegate sendPluginResult:plugin_result callbackId:callbackID]; 544 | }]; 545 | called_success_detection = true; 546 | called_failed_detection = false; 547 | detected_index = (int)idx; 548 | } 549 | 550 | bool valid_index = detected_index == (int)idx; 551 | 552 | if(![self getState:(int)idx] && !called_failed_detection && called_success_detection && valid_index) { 553 | [self.commandDelegate runInBackground:^{ 554 | CDVPluginResult* plugin_result = nil; 555 | NSString* msg = @"{\"message\":\"pattern not detected\"}"; 556 | plugin_result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR messageAsString:msg]; 557 | [plugin_result setKeepCallbackAsBool:YES]; 558 | 559 | [self.commandDelegate sendPluginResult:plugin_result callbackId:callbackID]; 560 | }]; 561 | called_success_detection = false; 562 | called_failed_detection = true; 563 | } 564 | } 565 | 566 | -(BOOL)getState: (const int &) index 567 | { 568 | int detection_thresh = 3; 569 | NSNumber *total = 0; 570 | total = [detection objectAtIndex:index]; 571 | 572 | if ([total intValue] >= detection_thresh) { 573 | return true; 574 | } else { 575 | return false; 576 | } 577 | } 578 | 579 | @end 580 | -------------------------------------------------------------------------------- /src/ios/ImageUtils.h: -------------------------------------------------------------------------------- 1 | // 2 | // ImageUtils.h 3 | // OpenCVTest 4 | // 5 | // Created by DNVA on 24/03/16. 6 | // Copyright © 2016 DNVA. All rights reserved. 
// ImageUtils: UIImage <-> base64 / cv::Mat conversion helpers used by the
// image-detection plugin.
@implementation ImageUtils

// Decode a base64 string into a UIImage.
// Returns nil when the payload is not valid base64 / not decodable image data.
+ (UIImage *)decodeBase64ToImage:(NSString *)encodedData
{
    NSData *data = [[NSData alloc] initWithBase64EncodedString:encodedData options:NSDataBase64DecodingIgnoreUnknownCharacters];
    return [UIImage imageWithData:data];
}

// Convert a UIImage into an 8-bit, 4-channel cv::Mat by redrawing the image
// into a bitmap context that is backed by the Mat's own pixel buffer.
+ (cv::Mat) cvMatFromUIImage: (UIImage *) image
{
    // NOTE(review): uses the source image's own colorspace; assumes it is an
    // RGB-compatible space for the 4-channel context below — TODO confirm for
    // grayscale inputs.
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;

    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels (color channels + alpha)

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,     // Pointer to data
                                                    cols,           // Width of bitmap
                                                    rows,           // Height of bitmap
                                                    8,              // Bits per component
                                                    cvMat.step[0],  // Bytes per row
                                                    colorSpace,     // Colorspace
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);

    return cvMat;
}

// Convert a cv::Mat back into a UIImage. A 1-channel Mat is rendered as
// grayscale; anything else is rendered through the device RGB colorspace.
+ (UIImage *) UIImageFromCVMat: (cv::Mat)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
    CGColorSpaceRef colorSpace;

    if (cvMat.elemSize() == 1)
    {
        colorSpace = CGColorSpaceCreateDeviceGray();
    }
    else
    {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                 // width
                                        cvMat.rows,                 // height
                                        8,                          // bits per component
                                        8 * cvMat.elemSize(),       // bits per pixel
                                        cvMat.step[0],              // bytesPerRow
                                        colorSpace,                 // colorspace
                                        kCGImageAlphaNone|kCGBitmapByteOrderDefault, // bitmap info
                                        provider,                   // CGDataProviderRef
                                        NULL,                       // decode
                                        false,                      // should interpolate
                                        kCGRenderingIntentDefault   // intent
                                        );

    // Getting UIImage from CGImage
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return finalImage;
}

// Open an image context at the device's native scale.
+ (void)beginImageContextWithSize:(CGSize)size
{
    if ([[UIScreen mainScreen] respondsToSelector:@selector(scale)]) {
        // FIX: use the actual screen scale instead of special-casing 2.0.
        // The old code only handled @2x displays, so @3x devices (scale 3.0)
        // fell through to a 1.0-scale context and produced blurry resizes.
        UIGraphicsBeginImageContextWithOptions(size, YES, [[UIScreen mainScreen] scale]);
    } else {
        UIGraphicsBeginImageContext(size);
    }
}

// Close the image context opened by +beginImageContextWithSize:.
+ (void)endImageContext
{
    UIGraphicsEndImageContext();
}

// Return a copy of `image` resized to `newSize` (stretched, not aspect-fit).
+ (UIImage*)imageWithImage:(UIImage*)image scaledToSize:(CGSize)newSize
{
    [self beginImageContextWithSize:newSize];
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    [self endImageContext];
    return newImage;
}

@end
/**
 * JS bridge for the native ImageDetection Cordova plugin.
 * All methods proxy to the native side via cordova.exec.
 */
var ImageDetectionPlugin = function () {};

/**
 * Start or pause native frame processing. Also forwards the current
 * viewport size so the native preview can match it.
 * @param {boolean} bool - true to process camera frames, false to pause.
 * @param {Function} successCallback
 * @param {Function} errorCallback
 */
ImageDetectionPlugin.prototype.startProcessing = function (bool, successCallback, errorCallback) {
    cordova.exec(successCallback, errorCallback, "ImageDetectionPlugin", "startProcessing", [bool, window.innerHeight, window.innerWidth]);
};

/**
 * Set the pattern image(s) to detect.
 * @param {string|string[]} patterns - one pattern or an array of patterns
 *     (base64-encoded images); a single value is wrapped in an array.
 * @param {Function} successCallback
 * @param {Function} errorCallback
 */
ImageDetectionPlugin.prototype.setPatterns = function (patterns, successCallback, errorCallback) {
    // Array.isArray is realm-safe, unlike `instanceof Array`, which fails for
    // arrays created in another window/iframe context.
    var _patterns = Array.isArray(patterns) ? patterns : [patterns];
    cordova.exec(successCallback, errorCallback, "ImageDetectionPlugin", "setPatterns", _patterns);
};

/**
 * Ask the native side whether a pattern is currently detected.
 * @param {Function} successCallback
 * @param {Function} errorCallback
 */
ImageDetectionPlugin.prototype.isDetecting = function (successCallback, errorCallback) {
    cordova.exec(successCallback, errorCallback, "ImageDetectionPlugin", "isDetecting", []);
};

/**
 * Set how long a pattern must stay matched before detection triggers.
 * @param {number} timeout - detection timeout forwarded to the native side.
 * @param {Function} successCallback
 * @param {Function} errorCallback
 */
ImageDetectionPlugin.prototype.setDetectionTimeout = function (timeout, successCallback, errorCallback) {
    cordova.exec(successCallback, errorCallback, "ImageDetectionPlugin", "setDetectionTimeout", [timeout]);
};

/**
 * Simple round-trip test call to the native side.
 * @param {string} name
 * @param {Function} successCallback
 * @param {Function} errorCallback
 */
ImageDetectionPlugin.prototype.greet = function (name, successCallback, errorCallback) {
    cordova.exec(successCallback, errorCallback, "ImageDetectionPlugin", "greet", [name]);
};

// Register a shared instance on window.plugins. Guarded so the module can
// also be loaded in non-browser environments (e.g. unit tests under Node),
// where `window` does not exist; behavior in the browser is unchanged.
if (typeof window !== 'undefined') {
    if (!window.plugins) {
        window.plugins = {};
    }
    if (!window.plugins.ImageDetectionPlugin) {
        window.plugins.ImageDetectionPlugin = new ImageDetectionPlugin();
    }
}

if (typeof module != 'undefined' && module.exports) {
    module.exports = ImageDetectionPlugin;
}