├── AndroidManifest.xml
├── lint.xml
├── project.properties
├── res
│   ├── drawable-nodpi
│   │   └── monkey_tex.png
│   ├── drawable
│   │   └── icon.png
│   ├── layout
│   │   └── image_manipulations_surface_view.xml
│   ├── raw
│   │   └── block10_mtl.mtl
│   └── values
│       └── strings.xml
└── src
    └── com
        └── timegalore
            └── motiondetectionar
                ├── CAMShiftDetection.java
                ├── MotionDetectionActivity.java
                ├── MotionFlowDetection.java
                └── OpenGLRenderer.java
/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/lint.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <lint>
3 | </lint>
--------------------------------------------------------------------------------
/project.properties:
--------------------------------------------------------------------------------
1 | # This file is automatically generated by Android Tools.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must be checked in Version Control Systems.
5 | #
6 | # To customize properties used by the Ant build system edit
7 | # "ant.properties", and override values to adapt the script to your
8 | # project structure.
9 | #
10 | # To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
11 | #proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
12 |
13 | android.library.reference.1=..\\..\\..\\OpenCV-2.4.6-android-sdk\\sdk\\java
14 | # Project target.
15 | target=android-17
16 | android.library.reference.2=../Rajawali
17 |
--------------------------------------------------------------------------------
/res/drawable-nodpi/monkey_tex.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timegalore/MotionDetectionAR/9f90d8fdf1aec873b780833c96c95ac7568e3721/res/drawable-nodpi/monkey_tex.png
--------------------------------------------------------------------------------
/res/drawable/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timegalore/MotionDetectionAR/9f90d8fdf1aec873b780833c96c95ac7568e3721/res/drawable/icon.png
--------------------------------------------------------------------------------
/res/layout/image_manipulations_surface_view.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/res/raw/block10_mtl.mtl:
--------------------------------------------------------------------------------
1 | # Blender MTL File: 'Bongo vanilla.blend'
2 | # Material Count: 1
3 |
4 | newmtl MonkeyMat
5 | Ns 96.078431
6 | Ka 0.000000 0.000000 0.000000
7 | Kd 0.640000 0.640000 0.640000
8 | Ks 0.100000 0.100000 0.100000
9 | Ni 1.000000
10 | d 1.000000
11 | illum 2
12 | map_Kd monkey_tex.png
--------------------------------------------------------------------------------
/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <string name="app_name">Motion Detection AR</string>
4 | </resources>
5 | 
--------------------------------------------------------------------------------
/src/com/timegalore/motiondetectionar/CAMShiftDetection.java:
--------------------------------------------------------------------------------
1 | package com.timegalore.motiondetectionar;
2 |
3 | import java.util.ArrayList;
4 | import java.util.List;
5 |
6 | import org.opencv.core.Core;
7 | import org.opencv.core.CvType;
8 | import org.opencv.core.Mat;
9 | import org.opencv.core.MatOfFloat;
10 | import org.opencv.core.MatOfInt;
11 | import org.opencv.core.Point;
12 | import org.opencv.core.Rect;
13 | import org.opencv.core.RotatedRect;
14 | import org.opencv.core.Scalar;
15 | import org.opencv.core.Size;
16 | import org.opencv.core.TermCriteria;
17 | import org.opencv.imgproc.Imgproc;
18 | import org.opencv.video.Video;
19 |
20 | import android.util.Log;
21 |
22 | public class CAMShiftDetection {
23 |
24 | private static final boolean DEBUG = true;
25 | private static final String TAG = "CamShiftDetection";
26 |
27 | private int g_erosion_level = 10;
28 | private int g_erosion_kernel_size = 4;
29 | private int g_termcrit_count = 10;
30 | private double g_termcrit_eps = 0.01;
31 | private Mat g_hist = new Mat();
32 | private Rect g_initialWindow = new Rect();
33 | private Rect g_firstWindow;
34 |
35 | public CAMShiftDetection(Mat targetImage, Rect initialWindow,
36 | int erosion_level, int erosion_kernel_size, int termcrit_count,
37 | double termcrit_eps) {
38 | g_erosion_level = erosion_level;
39 | g_erosion_kernel_size = erosion_kernel_size;
40 | g_termcrit_count = termcrit_count;
41 | g_termcrit_eps = termcrit_eps;
42 | g_initialWindow = initialWindow;
43 | g_firstWindow = new Rect(initialWindow.tl(), initialWindow.br());
44 |
45 | g_hist = getImageHistogram(getHue(targetImage), targetImage.size(), 10,
46 | 0, 180);
47 |
48 | }
49 |
50 | public RotatedRect CAMShift(Mat in) {
51 |
52 | Mat backProjection = getBackProjection(getHue(in), g_hist, 0, 180, 1.0);
53 |
54 | Mat clarifiedBackProjection = clarifyDetectedAreas(backProjection,
55 | g_erosion_kernel_size, g_erosion_level);
56 |
57 | validateWindow();
58 |
59 | RotatedRect rr = doCamShift(clarifiedBackProjection, g_initialWindow,
60 | g_termcrit_count, g_termcrit_eps);
61 |
62 | g_initialWindow = rr.boundingRect();
63 |
64 | return rr;
65 |
66 | }
67 |
68 | private void validateWindow() {
69 | if ((g_initialWindow.width < 0) || (g_initialWindow.height < 0)
70 | || (g_initialWindow.width * g_initialWindow.height < 10)) {
71 | if (DEBUG)
72 | Log.d(TAG, "detection window too small - resetting");
73 |
74 | if (DEBUG)
75 | Log.d(TAG, "g first wndow " + g_firstWindow.toString());
76 | g_initialWindow = new Rect(g_firstWindow.tl(), g_firstWindow.br());
77 | }
78 | }
79 |
80 | private Mat getHue(Mat in) {
81 | Mat out = new Mat(in.size(), CvType.CV_8UC1);
82 | Mat hueImage = new Mat(in.size(), in.type());
83 |
84 | Imgproc.cvtColor(in, hueImage, Imgproc.COLOR_RGB2HSV);
85 | Core.extractChannel(hueImage, out, 0);
86 |
87 | return out;
88 | }
89 |
90 | private Mat getImageHistogram(Mat huesImage, Size size, int buckets,
91 | float minRange, float maxRange) {
92 |
93 | Mat hist = new Mat();
94 |
95 | MatOfFloat ranges = new MatOfFloat(minRange, maxRange);
96 |
97 | List<Mat> planes = new ArrayList<Mat>();
98 | planes.add(huesImage);
99 |
100 | MatOfInt chans = new MatOfInt(0);
101 | MatOfInt histSize = new MatOfInt(buckets);
102 |
103 | Imgproc.calcHist(planes, chans, new Mat(), hist, histSize, ranges);
104 |
105 | return hist;
106 |
107 | }
108 |
109 | private Mat clarifyDetectedAreas(Mat in, int erosion_kernel_size,
110 | int erosion_level) {
111 | Mat out = new Mat(in.size(), in.type());
112 |
113 | Mat eroded_kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
114 | new Size(erosion_kernel_size, erosion_kernel_size), new Point(
115 | erosion_kernel_size / 2, erosion_kernel_size / 2));
116 |
117 | Imgproc.erode(in, out, eroded_kernel, new Point(-1, -1), erosion_level,
118 | Imgproc.BORDER_DEFAULT, new Scalar(0));
119 |
120 | return out;
121 | }
122 |
123 | private RotatedRect doCamShift(Mat in, Rect initialWindow,
124 | int termcrit_count, double termcrit_eps) {
125 |
126 | TermCriteria termcrit = new TermCriteria(TermCriteria.MAX_ITER
127 | | TermCriteria.EPS, termcrit_count, termcrit_eps);
128 |
129 | RotatedRect rr = Video.CamShift(in, initialWindow, termcrit);
130 |
131 | return rr;
132 |
133 | }
134 |
135 | private Mat getBackProjection(Mat in, Mat histogram, int minRange,
136 | int maxRange, double scale) {
137 | ArrayList<Mat> images = new ArrayList<Mat>();
138 | images.add(in);
139 |
140 | Mat backproject = new Mat(in.size(), CvType.CV_8UC1);
141 |
142 | Imgproc.calcBackProject(images, new MatOfInt(0), histogram,
143 | backproject, new MatOfFloat(minRange, maxRange), scale);
144 |
145 | return backproject;
146 | }
147 |
148 | }
149 |
--------------------------------------------------------------------------------
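
A minimal sketch of how CAMShiftDetection is meant to be driven, assuming the
OpenCV native library is already loaded and that target.jpg / frame.jpg are
hypothetical stand-in images (the real app captures the target from the camera
viewfinder). The tuning values mirror the ones MotionDetectionActivity passes
below. Note that Highgui.imread returns BGR, so a faithful test would convert
to RGB first, as the activity's loadImageFromFile does.

import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;
import org.opencv.highgui.Highgui;

public class CamShiftSketch {
    public static void main(String[] args) {
        // stand-in for the captured viewfinder image of the object to track
        Mat target = Highgui.imread("target.jpg");

        // seed the search window with the central third of the target,
        // as MotionDetectionActivity does in onManagerConnected()
        Rect initialWindow = new Rect(target.width() / 3, target.height() / 3,
                target.width() * 2 / 3, target.height() * 2 / 3);

        // erosion level 10, 4x4 kernel, stop after 10 iterations or eps 0.01
        CAMShiftDetection csd = new CAMShiftDetection(target, initialWindow,
                10, 4, 10, 0.01);

        // per camera frame: returns the tracked object's rotated bounding box
        Mat frame = Highgui.imread("frame.jpg");
        RotatedRect rr = csd.CAMShift(frame);
        System.out.println("centre: " + rr.center + ", size: " + rr.size
                + ", angle: " + rr.angle);
    }
}
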
/src/com/timegalore/motiondetectionar/MotionDetectionActivity.java:
--------------------------------------------------------------------------------
1 | package com.timegalore.motiondetectionar;
2 |
3 | import java.io.File;
4 | import java.text.DecimalFormat;
5 |
6 | import org.opencv.android.BaseLoaderCallback;
7 | import org.opencv.android.CameraBridgeViewBase;
8 | import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
9 | import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
10 | import org.opencv.android.JavaCameraView;
11 | import org.opencv.android.LoaderCallbackInterface;
12 | import org.opencv.android.OpenCVLoader;
13 | import org.opencv.core.Core;
14 | import org.opencv.core.Mat;
15 | import org.opencv.core.Point;
16 | import org.opencv.core.Rect;
17 | import org.opencv.core.RotatedRect;
18 | import org.opencv.core.Scalar;
19 | import org.opencv.highgui.Highgui;
20 | import org.opencv.imgproc.Imgproc;
21 |
22 | import rajawali.RajawaliActivity;
23 | import android.content.Context;
24 | import android.hardware.Sensor;
25 | import android.hardware.SensorEvent;
26 | import android.hardware.SensorEventListener;
27 | import android.hardware.SensorManager;
28 | import android.os.Bundle;
29 | import android.os.Environment;
30 | import android.util.Log;
31 | import android.view.Menu;
32 | import android.view.MenuItem;
33 | import android.view.MotionEvent;
34 | import android.view.View;
35 | import android.view.View.OnTouchListener;
36 | import android.view.WindowManager;
37 |
38 | public class MotionDetectionActivity extends RajawaliActivity implements
39 | CvCameraViewListener2, OnTouchListener, SensorEventListener {
40 |
41 | private OpenGLRenderer mRenderer;
42 |
43 | private static final boolean DEBUG = true;
44 | private static final String TAG = "MotionDetectionActivity";
45 |
46 | public static final int VIEW_MODE_CAPTUREIMAGE = 2;
47 | public static final int VIEW_MODE_SHOWIMAGE = 3;
48 | public static final int VIEW_MODE_CAMSHIFT = 8;
49 |
50 | private MenuItem mItemPreviewCaptureImage;
51 | private MenuItem mItemPreviewSampleImage;
52 | private MenuItem mItemCamShift;
53 | private CameraBridgeViewBase mOpenCvCameraView;
54 |
55 | private Mat loadedImage = null;
56 |
57 | private Mat mRgba;
58 |
59 | private boolean showThumbs = true;
60 | private boolean showEllipse = true;
61 |
62 | public static int viewMode = VIEW_MODE_CAMSHIFT;
63 |
64 | private CAMShiftDetection csd;
65 | private MotionFlowDetection mfd;
66 |
67 | // Accelerometer
68 | SensorManager mSensor = null;
69 | private static int mSensorX;
70 | private static int mSensorY;
71 | private static int mSensorZ;
72 | // Accelerometer
73 |
74 | private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
75 | @Override
76 | public void onManagerConnected(int status) {
77 | switch (status) {
78 | case LoaderCallbackInterface.SUCCESS: {
79 | Log.i(TAG, "OpenCV loaded successfully");
80 | mOpenCvCameraView.enableView();
81 |
82 | Log.d(TAG, "loading file");
83 |
84 | loadedImage = loadImageFromFile("red.jpg");
85 |
86 | Rect initialWindow = new Rect(loadedImage.width() / 3,
87 | loadedImage.height() / 3, loadedImage.width() * 2 / 3,
88 | loadedImage.height() * 2 / 3);
89 |
90 | csd = new CAMShiftDetection(loadedImage, initialWindow, 10, 4,
91 | 10, 0.01);
92 |
93 | }
94 | break;
95 | default: {
96 | super.onManagerConnected(status);
97 | }
98 | break;
99 | }
100 | }
101 | };
102 |
103 | public MotionDetectionActivity() {
104 | Log.i(TAG, "Instantiated new " + this.getClass());
105 | }
106 |
107 | /** Called when the activity is first created. */
108 | @Override
109 | public void onCreate(Bundle savedInstanceState) {
110 | Log.i(TAG, "called onCreate");
111 | super.onCreate(savedInstanceState);
112 |
113 | getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
114 |
115 | mOpenCvCameraView = new JavaCameraView(this, -1);
116 | mOpenCvCameraView.setCvCameraViewListener(this);
117 |
118 | mLayout.addView(mOpenCvCameraView);
119 |
120 | mSurfaceView.setZOrderMediaOverlay(true);
121 | setGLBackgroundTransparent(true);
122 | mRenderer = new OpenGLRenderer(this);
123 | mRenderer.setSurfaceView(mSurfaceView);
124 | super.setRenderer(mRenderer);
125 |
126 | mRenderer.setCameraPosition(0, 0, 20);
127 |
128 | }
129 |
130 | @Override
131 | public void onPause() {
132 | super.onPause();
133 | if (mOpenCvCameraView != null)
134 | mOpenCvCameraView.disableView();
135 | if (mSensor != null) mSensor.unregisterListener(this); // balance the registerListener in onResume
136 | }
136 |
137 | @Override
138 | public void onResume() {
139 | super.onResume();
140 | OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
141 | mLoaderCallback);
142 |
143 | mOpenCvCameraView.setOnTouchListener(this);
144 |
145 | initialiseSensor();
146 |
147 | }
148 |
149 | public void onDestroy() {
150 | super.onDestroy();
151 | if (mOpenCvCameraView != null)
152 | mOpenCvCameraView.disableView();
153 | }
154 |
155 | @Override
156 | public boolean onCreateOptionsMenu(Menu menu) {
157 | Log.i(TAG, "called onCreateOptionsMenu");
158 |
159 | mItemPreviewCaptureImage = menu.add("Capture Image");
160 | mItemPreviewSampleImage = menu.add("Sample Image");
161 | mItemCamShift = menu.add("Cam Shift");
162 |
163 | return true;
164 | }
165 |
166 | @Override
167 | public boolean onOptionsItemSelected(MenuItem item) {
168 | if (DEBUG)
169 | Log.d(TAG, "called onOptionsItemSelected; selected item: " + item);
170 |
171 | if (item == mItemPreviewCaptureImage)
172 | viewMode = VIEW_MODE_CAPTUREIMAGE;
173 | else if (item == mItemPreviewSampleImage)
174 | viewMode = VIEW_MODE_SHOWIMAGE;
175 | else if (item == mItemCamShift)
176 | viewMode = VIEW_MODE_CAMSHIFT;
177 | return true;
178 | }
179 |
180 | public void onCameraViewStarted(int width, int height) {
181 |
182 | mRgba = new Mat();
183 |
184 | }
185 |
186 | public void onCameraViewStopped() {
187 | // Explicitly deallocate Mats
188 |
189 | if (mRgba != null)
190 | mRgba.release();
191 |
192 | mRgba = null;
193 |
194 | }
195 |
196 | public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
197 |
200 | mRgba = inputFrame.rgba();
201 |
202 | switch (MotionDetectionActivity.viewMode) {
203 |
204 | case MotionDetectionActivity.VIEW_MODE_CAPTUREIMAGE:
205 |
206 | int w = mRgba.width();
207 | int h = mRgba.height();
208 |
209 | Core.rectangle(mRgba, new Point(w * 1 / 3, h * 1 / 3), new Point(
210 | w * 2 / 3, h * 2 / 3), new Scalar(255, 0, 0, 255));
211 |
212 | break;
213 |
214 | case MotionDetectionActivity.VIEW_MODE_SHOWIMAGE:
215 |
216 | Imgproc.resize(loadedImage, mRgba, mRgba.size());
217 |
218 | break;
219 |
220 | case MotionDetectionActivity.VIEW_MODE_CAMSHIFT:
221 |
222 | RotatedRect rr = csd.CAMShift(mRgba);
223 |
224 | if (showEllipse)
225 | Core.ellipse(mRgba, rr, new Scalar(255, 255, 0), 5);
226 |
227 | if (mfd == null)
228 | mfd = new MotionFlowDetection(mRgba.size());
229 |
230 | int leftRightRot = mfd.motionFlowDetection(mRgba);
231 |
232 | Core.putText(mRgba, "cx: " + (int) rr.center.x + " x: " + mSensorX
233 | + " y: " + mSensorY + " z: " + mSensorZ + " r: "
234 | + leftRightRot, new Point(0, 30),
235 | Core.FONT_HERSHEY_COMPLEX, 1, new Scalar(255, 0, 0, 255), 2);
236 |
237 | if (mRenderer.isReady())
238 | augmentImage(mRgba, rr, mSensorX, mSensorY, mSensorZ,
239 | leftRightRot);
240 |
241 | break;
242 |
243 | }
244 |
245 | return mRgba;
246 | }
247 |
248 | private void augmentImage(Mat mRgba, RotatedRect rr, int mSensorX,
249 | int mSensorY, int mSensorZ, int leftRightRot) {
250 |
251 | // X is to the right of the device when facing it
252 | // Y is through the top of the device when facing it
253 | // Z is coming straight out of the screen when facing it
254 |
255 | // draw line through the centre of the object along the z axis
256 | // with the phone vertical the line would be straight up and down
257 | // and rotation left/right would cause line angle to change
258 |
259 | // Front/Back rotate is simply Z
260 | // rotation clockwise/anticlockwise is slightly harder
261 | // but doesn't involve Z
262 | // in landscape: X is 10 and Y is 0
263 | // in portrait X is 0 and Y is 10
264 | // in upside down landscape: X is -10 and Y is 0
265 | // in upside down portrait: X is 0 and Y is -10
266 | // so angle of rotation where normal portrait is say 0 degrees
267 | // is: ATAN2(Y,-X)+PI/2
268 | // left/right movement is Y but depends on the clockwise
269 | // rotation, so we need to factor this in as Y * cos(angle)
270 |
271 | Point centre = rr.center;
272 |
273 | double lrTiltAngleInRadians = Math.atan2(mSensorY, mSensorX);
274 |
275 | double fbTiltAngleInRadians = Math.PI / 2 * Math.sin(mSensorZ / 10.0);
276 |
277 | // due to limitations on sensor information, the phone cannot
278 | // distinguish between, say, a landscape view from the top and
279 | // an inverted landscape view from the bottom - i.e. the sensors
280 | // will show the same readings in both cases
281 | // the trick therefore is to use the sign of the Z reading to flip
282 | // the object when X becomes negative.
283 | if ((mSensorX < 0) && (mSensorZ > 0)) {
284 | // fbTiltAngleInRadians += Math.PI;
285 | // lrTiltAngleInRadians = -lrTiltAngleInRadians;
286 | fbTiltAngleInRadians += Math.PI;
287 | mRenderer.setSpin(0);
288 |
289 | }
290 |
291 | DecimalFormat df = new DecimalFormat("#.##");
292 |
293 | if (DEBUG)
294 | Log.d(TAG,
295 | "x:" + mSensorX + " y:" + mSensorY + " z:" + mSensorZ
296 | + " rot:" + df.format(lrTiltAngleInRadians)
297 | + " fb:" + df.format(fbTiltAngleInRadians) + " lr:"
298 | + df.format(leftRightRot));
299 |
300 | setPosition(centre.x, centre.y);
301 |
302 | mRenderer.setCamLRTilt(-lrTiltAngleInRadians);
303 | mRenderer.setCamFBTilt(-fbTiltAngleInRadians);
304 |
305 | double cs = Math.sqrt(rr.boundingRect().area());
309 |
310 | mRenderer.setCubeSize(2 * cs / 480); // 0.6 for pegasus
311 |
312 | }
313 |
314 | public void printMatDetails(String name, Mat m) {
315 |
316 | Log.d(TAG,
317 | name + " - " + "c:" + m.channels() + ",cols:" + m.cols()
318 | + ",dep:" + m.depth() + ",rows:" + m.rows() + ",type:"
319 | + m.type() + ",w:" + m.width() + ",h:" + m.height());
320 |
321 | }
322 |
323 | @Override
324 | public boolean onTouch(View v, MotionEvent event) {
325 | if (DEBUG)
326 | Log.d(TAG, "got touch " + event.getAction());
327 |
328 | float x = event.getX();
329 | float y = event.getY();
330 |
331 | if (DEBUG)
332 | Log.d(TAG, "x=" + x + ",y=" + y);
333 |
334 | // setPosition(x, y);
335 |
336 | if (DEBUG)
337 | Log.d(TAG, "object pos: "
338 | + mRenderer.get3DObjectPosition().toString());
339 |
340 | if (viewMode == VIEW_MODE_CAPTUREIMAGE) {
341 |
342 | int w = mRgba.width();
343 | int h = mRgba.height();
344 |
345 | // +1 to x,y to avoid cutting the red line of the viewfinder box
346 | Rect roi = new Rect(new Point(w * 1 / 3 + 1, h * 1 / 3 + 1),
347 | new Point(w * 2 / 3, h * 2 / 3));
348 |
349 | Mat viewFinder = mRgba.submat(roi);
350 |
351 | Imgproc.resize(viewFinder, loadedImage, loadedImage.size());
352 |
353 | Rect initialWindow = new Rect(loadedImage.width() / 3,
354 | loadedImage.height() / 3, loadedImage.width() * 2 / 3,
355 | loadedImage.height() * 2 / 3);
356 |
357 | csd = new CAMShiftDetection(loadedImage, initialWindow, 10, 4, 10,
358 | 0.01);
359 |
360 | }
361 |
362 | if (viewMode == VIEW_MODE_CAMSHIFT) {
363 | showEllipse = !showEllipse;
364 | }
365 |
366 | return false;
367 | }
368 |
369 | private void setPosition(double x, double y) {
370 | double cD = mRenderer.getCurrentCamera().getZ();
371 |
372 | float yVP = mRenderer.getViewportHeight();
373 | float xVP = mRenderer.getViewportWidth();
374 |
375 | // =(K16-xVP/2)* (cD/xVP)
376 | // =(K17-yVP/2)* (cD/2/yVP)
377 |
378 | double sx = 0.7;
379 | double sy = 1.3;
380 |
381 | double obx = (x - xVP / 2) * (cD / sx / xVP);
382 | double oby = (yVP / 2 - y) * (cD / sy / yVP);
383 |
384 | mRenderer.set3DObjectPosition(obx, oby, 0);
385 |
386 | }
387 |
388 | public Mat loadImageFromFile(String fileName) {
389 |
390 | Mat rgbLoadedImage = null;
391 |
392 | File root = Environment.getExternalStorageDirectory();
393 | File file = new File(root, fileName);
394 |
395 | // this should be in BGR format according to the
396 | // documentation.
397 | Mat image = Highgui.imread(file.getAbsolutePath());
398 |
399 | if (image.width() > 0) {
400 |
401 | rgbLoadedImage = new Mat(image.size(), image.type());
402 |
403 | Imgproc.cvtColor(image, rgbLoadedImage, Imgproc.COLOR_BGR2RGB);
404 |
405 | if (DEBUG)
406 | Log.d(TAG, "loadedImage: " + "chans: " + image.channels()
407 | + ", (" + image.width() + ", " + image.height() + ")");
408 |
409 | image.release();
410 | image = null;
411 | }
412 |
413 | return rgbLoadedImage;
414 |
415 | }
416 |
417 | public void writeImageToFile(Mat image, String filename) {
418 |
419 | File root = Environment.getExternalStorageDirectory();
420 | File file = new File(root, filename);
421 |
422 | Highgui.imwrite(file.getAbsolutePath(), image);
423 |
424 | if (DEBUG)
425 | Log.d(TAG,
426 | "writing: " + file.getAbsolutePath() + " (" + image.width()
427 | + ", " + image.height() + ")");
428 | }
429 |
430 | private void initialiseSensor() {
431 | if (mSensor == null)
432 | mSensor = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
433 |
434 | mSensor.registerListener(this,
435 | mSensor.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
436 | SensorManager.SENSOR_DELAY_GAME);
437 |
438 | }
439 |
440 | @Override
441 | public void onAccuracyChanged(Sensor arg0, int arg1) {
442 | }
443 |
444 | @Override
445 | public void onSensorChanged(SensorEvent event) {
446 |
447 | float[] vals = event.values;
448 |
449 | mSensorX = (int) vals[0];
450 | mSensorY = (int) vals[1];
451 | mSensorZ = (int) vals[2];
452 |
453 | }
454 | }
455 |
--------------------------------------------------------------------------------
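
The tilt handling in augmentImage() is self-contained trigonometry, so it can
be checked without any OpenCV or Android dependencies. This hedged sketch just
replays the two formulas from that method on a few representative accelerometer
readings (each axis reads roughly ±10 m/s², i.e. ±1 g); the pose values are the
ones named in the comments above.

public class TiltMathSketch {
    public static void main(String[] args) {
        int[][] poses = {
                { 10, 0, 0 },  // landscape
                { 0, 10, 0 },  // portrait
                { 0, 0, 10 },  // flat on a table, screen up
        };
        for (int[] p : poses) {
            // left/right tilt, as in augmentImage(): atan2(Y, X)
            double lr = Math.atan2(p[1], p[0]);
            // front/back tilt: PI/2 * sin(Z / 10)
            double fb = Math.PI / 2 * Math.sin(p[2] / 10.0);
            System.out.printf("x=%d y=%d z=%d -> lr=%.2f rad, fb=%.2f rad%n",
                    p[0], p[1], p[2], lr, fb);
        }
    }
}

Landscape gives lr = 0, portrait gives lr = π/2, and lying flat gives
fb = π/2 · sin(1) ≈ 1.32 rad, matching the behaviour the comments describe.
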
/src/com/timegalore/motiondetectionar/MotionFlowDetection.java:
--------------------------------------------------------------------------------
1 | package com.timegalore.motiondetectionar;
2 |
3 | import org.opencv.core.Core;
4 | import org.opencv.core.CvType;
5 | import org.opencv.core.Mat;
6 | import org.opencv.core.MatOfByte;
7 | import org.opencv.core.MatOfFloat;
8 | import org.opencv.core.MatOfPoint;
9 | import org.opencv.core.MatOfPoint2f;
10 | import org.opencv.core.Point;
11 | import org.opencv.core.Scalar;
12 | import org.opencv.core.Size;
13 | import org.opencv.imgproc.Imgproc;
14 | import org.opencv.video.Video;
15 |
16 | public class MotionFlowDetection {
17 |
18 | private Mat mGray1 = null;
19 | private Mat mGray2 = null;
20 | MatOfPoint initial = null;
21 | MatOfByte status = null;
22 | MatOfFloat err = null;
23 | MatOfPoint2f prevPts = null;
24 | MatOfPoint2f nextPts = null;
25 | int maxCorners;
26 | Size imageSize;
27 |
28 | public MotionFlowDetection(Size s) {
29 | imageSize = s;
30 | }
31 |
32 | public Point motionFlowDetection(Mat prevImage, Mat nextImage) {
33 |
34 | Point direction = null;
35 |
36 | setOpticalFlowParameters(prevImage);
37 |
38 | Mat resultImage = prevImage.clone();
39 |
40 | Imgproc.cvtColor(prevImage, mGray1, Imgproc.COLOR_RGBA2GRAY);
41 | Imgproc.cvtColor(nextImage, mGray2, Imgproc.COLOR_RGBA2GRAY);
42 |
43 | Imgproc.goodFeaturesToTrack(mGray1, initial, 1000, 0.01, 5);
44 |
45 | initial.convertTo(prevPts, CvType.CV_32FC2);
46 |
47 | Video.calcOpticalFlowPyrLK(mGray1, mGray2, prevPts, nextPts, status,
48 | err);
49 |
50 | Point[] pointp = prevPts.toArray();
51 | Point[] pointn = nextPts.toArray();
52 |
53 | markPointsOnImage(resultImage, pointp, pointn);
54 |
55 | direction = getAverageDirection(pointp, pointn);
56 |
57 | return direction;
58 |
59 | }
60 |
61 | private Point getAverageDirection(Point[] pointp, Point[] pointn) {
62 |
63 | Point p = new Point();
64 |
65 | int nosOfPoints = pointp.length;
66 |
67 | for (int i = 0; i < nosOfPoints; i++) {
68 | p.x += pointp[i].x - pointn[i].x;
69 | p.y += pointp[i].y - pointn[i].y;
70 | }
71 |
72 | p.x = nosOfPoints > 0 ? p.x / nosOfPoints : 0;
73 | p.y = nosOfPoints > 0 ? p.y / nosOfPoints : 0;
74 |
75 | return p;
76 |
77 | }
78 |
79 | private void markPointsOnImage(Mat resultImage, Point[] pointp,
80 | Point[] pointn) {
81 |
82 | for (int i = 0; i < pointp.length; i++) {
83 |
87 | Core.circle(resultImage, pointn[i], 10, new Scalar(255, 0, 0, 255));
88 | }
89 |
90 | }
91 |
92 | public int motionFlowDetection(Mat image) {
93 |
94 | setOpticalFlowParameters(image);
95 |
96 | mGray1 = mGray2.clone();
97 |
98 | Imgproc.cvtColor(image, mGray2, Imgproc.COLOR_RGBA2GRAY);
99 |
100 | Imgproc.goodFeaturesToTrack(mGray1, initial, maxCorners, 0.01, 5);
101 |
102 | initial.convertTo(prevPts, CvType.CV_32FC2);
103 |
104 | Video.calcOpticalFlowPyrLK(mGray1, mGray2, prevPts, nextPts, status,
105 | err);
106 |
107 | Point[] pointp = prevPts.toArray();
108 | Point[] pointn = nextPts.toArray();
109 |
110 | int dir = calculateVelocityFromMotionFlow(pointn, pointp);
111 |
112 | return removeOutliersAndNoise(dir);
113 | }
114 |
115 | private int removeOutliersAndNoise(int v) {
116 |
117 | if ((v < 0) && (v > -10))
118 | v = 0;
119 |
120 | if ((v > 0) && (v < 10))
121 | v = 0;
122 |
123 | if ((v < 0) && (v < -80))
124 | v = -80;
125 | if ((v > 0) && (v > 80))
126 | v = 80;
127 |
128 | return v;
129 | }
130 |
131 | private int calculateVelocityFromMotionFlow(Point[] pointn, Point[] pointp) {
132 |
133 | // find the average difference across all the analysed points;
134 | // the sign of the average gives you the direction
136 |
137 | int points = pointn.length;
138 |
139 | int total = 0;
140 |
141 | for (int i = 0; i < points; i++) {
142 |
143 | total += pointn[i].x - pointp[i].x;
144 |
145 | }
146 |
147 | return points > 0 ? total / points : 0;
148 | }
149 |
150 | private void setOpticalFlowParameters(Mat image) {
151 |
152 | if (mGray1 == null)
153 | mGray1 = new Mat(imageSize, CvType.CV_8UC1);
154 | if (mGray2 == null) {
155 | mGray2 = new Mat(imageSize, CvType.CV_8UC1);
156 | Imgproc.cvtColor(image, mGray2, Imgproc.COLOR_RGBA2GRAY);
157 |
158 | }
159 |
160 | initial = new MatOfPoint();
161 | status = new MatOfByte();
162 | err = new MatOfFloat();
163 | prevPts = new MatOfPoint2f();
164 | nextPts = new MatOfPoint2f();
165 |
166 | maxCorners = 10;
167 |
168 | }
169 |
170 | }
171 |
--------------------------------------------------------------------------------
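
The single-image overload above reduces optical flow to one signed integer: the
average horizontal displacement of the tracked corners between consecutive
frames. A dependency-free sketch of that arithmetic, with made-up feature
coordinates, shows the clamping behaviour of removeOutliersAndNoise():

public class FlowVelocitySketch {
    public static void main(String[] args) {
        // x-coordinates of three tracked features in the previous/next frame;
        // every feature moved roughly 40px to the right
        int[] prevX = { 100, 200, 300 };
        int[] nextX = { 142, 238, 341 };

        int total = 0;
        for (int i = 0; i < prevX.length; i++)
            total += nextX[i] - prevX[i];
        int v = total / prevX.length; // signed average: + right, - left

        // same clamp as removeOutliersAndNoise(): |v| < 10 is treated as
        // jitter, |v| > 80 is saturated
        if (v > -10 && v < 10) v = 0;
        v = Math.max(-80, Math.min(80, v));

        System.out.println("estimated horizontal flow: " + v); // prints 40
    }
}
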
/src/com/timegalore/motiondetectionar/OpenGLRenderer.java:
--------------------------------------------------------------------------------
1 | package com.timegalore.motiondetectionar;
2 |
3 | import javax.microedition.khronos.opengles.GL10;
4 |
5 | import rajawali.Object3D;
6 | import rajawali.lights.DirectionalLight;
7 | import rajawali.math.vector.Vector3;
8 | import rajawali.parser.LoaderOBJ;
9 | import rajawali.parser.ParsingException;
10 | import rajawali.renderer.RajawaliRenderer;
11 | import android.content.Context;
12 | import android.util.Log;
13 |
14 | public class OpenGLRenderer extends RajawaliRenderer {
15 |
16 | private static final boolean DEBUG = true;
17 | private static final String TAG = "OpenGLRenderer";
18 |
19 | private DirectionalLight mLight;
20 |
21 | private Object3D m3DObject;
22 |
23 | private static final double rtod = 180 / Math.PI;
24 |
25 | public OpenGLRenderer(Context context) {
26 | super(context);
27 | setFrameRate(60);
28 | }
29 |
30 | public void initScene() {
31 | mLight = new DirectionalLight(1f, 0.2f, -1.0f); // set the direction
32 | mLight.setColor(1.0f, 1.0f, 1.0f);
33 | mLight.setPower(2);
34 |
35 | LoaderOBJ objParser = new LoaderOBJ(mContext.getResources(),
36 | mTextureManager, R.raw.block10_obj);
37 |
38 | try {
39 | objParser.parse();
40 | } catch (ParsingException e) {
41 |
42 | e.printStackTrace();
43 |
44 | }
45 |
46 | m3DObject = objParser.getParsedObject();
47 | m3DObject.setPosition(0, 0, 0);
48 |
49 | addChild(m3DObject);
50 |
51 | }
52 |
53 | @Override
54 | public void onDrawFrame(GL10 glUnused) {
55 | super.onDrawFrame(glUnused);
56 |
57 | }
58 |
59 | public void set3DObjectPosition(double x, double y, double z) {
60 |
61 | if (m3DObject != null)
62 | m3DObject.setPosition(x, y, z);
63 | }
64 |
65 | public Vector3 get3DObjectPosition() {
66 |
67 | return m3DObject != null ? m3DObject.getPosition() : new Vector3();
68 | }
69 |
70 | public void setCameraPosition(double x, double y, double z) {
71 |
72 | getCurrentCamera().setX(x);
73 | getCurrentCamera().setY(y);
74 | getCurrentCamera().setZ(z);
75 |
76 | }
77 |
78 | public void setCamLRTilt(double lrTiltAngleInRadians) {
79 | getCurrentCamera().setRotZ(-lrTiltAngleInRadians * rtod);
80 |
81 | }
82 |
83 | public void setCamFBTilt(double fbTiltAngleInRadians) {
84 | getCurrentCamera().setRotX(-fbTiltAngleInRadians * rtod);
85 |
86 | }
87 |
88 | public void setCubeSize(double d) {
89 | m3DObject.setScale(d);
90 | }
91 |
92 | public Vector3 getCubeSize() {
93 | return m3DObject.getScale();
94 | }
95 |
96 | public void setLRTilt(double lrTiltAngleInRadians) {
97 | m3DObject.setRotZ(-lrTiltAngleInRadians * rtod);
98 |
99 | }
100 |
101 | public void setFBTilt(double fbTiltAngleInRadians) {
102 | m3DObject.setRotX(-fbTiltAngleInRadians * rtod);
103 |
104 | }
105 |
106 | public void setSpin(double spinAngleInDegrees) {
107 |
108 | m3DObject.setRotY(m3DObject.getRotY() + spinAngleInDegrees);
109 |
110 | if (DEBUG)
111 | Log.d(TAG, "getRotY: " + m3DObject.getRotY());
112 |
113 | }
114 |
115 | public boolean isReady() {
116 | return (m3DObject != null);
117 | }
118 |
119 | }
120 |
--------------------------------------------------------------------------------
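
One convention worth noting when reading the renderer: Rajawali's setRotX/Y/Z
take degrees while the activity works in radians, hence the rtod constant. A
tiny sketch of the double negation between MotionDetectionActivity and
setCamLRTilt() (the activity passes -lrTiltAngleInRadians, and the renderer
negates again before converting for setRotZ):

public class TiltConventionSketch {
    public static void main(String[] args) {
        double rtod = 180 / Math.PI; // radians-to-degrees, as in OpenGLRenderer

        double lrTiltAngleInRadians = Math.PI / 2; // portrait tilt
        double passed = -lrTiltAngleInRadians;     // negated by the activity
        double rotZDegrees = -passed * rtod;       // negated again in setCamLRTilt
        System.out.println("setRotZ argument: " + rotZDegrees); // 90.0
    }
}

The two negations cancel, so the virtual camera ends up rotating in the same
direction as the phone.
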