├── clean.sh
├── jni
│   ├── Application.mk
│   ├── PCOpenCVTest
│   │   ├── CMakeLists.txt
│   │   ├── main.cpp
│   │   └── PCOpenCVTest.xcodeproj
│   │       └── project.pbxproj
│   ├── Android.mk
│   ├── charactertracker.i
│   ├── Detector.i
│   ├── CMakeLists.txt
│   ├── Detector.h
│   ├── jni_part.cpp
│   └── Detector.cpp
├── CMakeLists.txt
├── cmake_armeabi.sh
├── cmake_run_from_build.sh
├── src
│   └── edu
│       └── mit
│           └── media
│               └── fluid
│                   └── royshil
│                       ├── headfollower
│                       │   ├── IBitmapHolder.java
│                       │   ├── IMarkerShower.java
│                       │   ├── ICharacterStateHandler.java
│                       │   ├── ClearRenderer.java
│                       │   ├── TransformableImageView.java
│                       │   ├── FileFrameProcessor.java
│                       │   ├── BitmapDrawerSurfaceView.java
│                       │   ├── CharacterRenderer.java
│                       │   ├── FrameProcessorBase.java
│                       │   ├── CameraFrameProcessor.java
│                       │   ├── CharacterTrackerView.java
│                       │   ├── Sensors.java
│                       │   └── HeadFollower.java
│                       └── graphics
│                           ├── MyAnimations.java
│                           └── MyCanvasView.java
├── default.properties
├── README
└── AndroidManifest.xml

/clean.sh:
--------------------------------------------------------------------------------
1 | make OPENCV_ROOT=../../opencv V=0 clean
2 | 
--------------------------------------------------------------------------------
/jni/Application.mk:
--------------------------------------------------------------------------------
1 | APP_STL := gnustl_static
2 | APP_CPPFLAGS := -frtti -fexceptions
3 | APP_ABI := armeabi
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8)
2 | 
3 | project(HeadFollower)
4 | 
5 | add_subdirectory(jni)
--------------------------------------------------------------------------------
/cmake_armeabi.sh:
--------------------------------------------------------------------------------
1 | cmake -DARM_TARGET="armeabi" -DCMAKE_TOOLCHAIN_FILE=$OPENCV_PACKAGE_DIR/android.toolchain.cmake ..
2 | make clean ; make -j4
3 | 
--------------------------------------------------------------------------------
/cmake_run_from_build.sh:
--------------------------------------------------------------------------------
1 | cmake -DARM_TARGET="armeabi-v7a with NEON" -DCMAKE_TOOLCHAIN_FILE=$OPENCV_PACKAGE_DIR/android.toolchain.cmake ..
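# Note: "armeabi-v7a with NEON" selects the NEON-enabled OpenCV package.
# OPENCV_PACKAGE_DIR must point at the OpenCV Android package that ships
# android.toolchain.cmake; jni/CMakeLists.txt reads the same environment
# variable to locate the per-ABI OpenCV libraries.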
2 | make clean ; make -j4
3 | 
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/IBitmapHolder.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | import android.graphics.Bitmap;
4 | 
5 | public interface IBitmapHolder {
6 | 
7 | 	public abstract Bitmap getBmp();
8 | 
9 | 	public abstract void setBmp(Bitmap bmp);
10 | 
11 | }
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/IMarkerShower.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | public interface IMarkerShower {
4 | 
5 | 	void showMarker();
6 | 
7 | 	void removeMarker();
8 | 
9 | 	void showCharacter();
10 | 
11 | 	void showCalibrationMessage();
12 | 
13 | }
14 | 
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/ICharacterStateHandler.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | public interface ICharacterStateHandler {
4 | 
5 | 	public abstract void onCharacterStateChanged(float[] state);
6 | 
7 | 	public abstract void onCalibrationStateChanged(int[] currentState);
8 | 
9 | }
--------------------------------------------------------------------------------
/jni/PCOpenCVTest/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8)
2 | 
3 | project(HeadFollower-PCTest)
4 | 
5 | find_package(OpenCV REQUIRED)
6 | message("OpenCV dir: " ${OpenCV_DIR})
7 | 
8 | include_directories(${OpenCV_INCLUDE_DIRS} ..)
9 | 
10 | add_definitions(-D_PC_COMPILE)
11 | add_executable(HeadFollower-PCTest main.cpp ../Detector.cpp)
12 | 
13 | target_link_libraries(HeadFollower-PCTest ${OpenCV_LIBS})
--------------------------------------------------------------------------------
/default.properties:
--------------------------------------------------------------------------------
1 | # This file is automatically generated by Android Tools.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must be checked in Version Control Systems.
5 | #
6 | # To customize properties used by the Ant build system use,
7 | # "build.properties", and override values to adapt the script to your
8 | # project structure.
9 | 
10 | # Project target.
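# (android-7 is API level 7, i.e. Android 2.1; android.library=true marks this
# as an Android library project rather than a directly installable application.)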
11 | target=android-7
12 | android.library=true
--------------------------------------------------------------------------------
/jni/Android.mk:
--------------------------------------------------------------------------------
1 | LOCAL_PATH := $(call my-dir)
2 | 
3 | include $(CLEAR_VARS)
4 | 
5 | OPENCV_MK_PATH:=/Users/royshilkrot/Downloads/opencv-trunk-for-android/opencv/android/build/OpenCV.mk
6 | 
7 | ifeq ("$(wildcard $(OPENCV_MK_PATH))","")
8 | 	#try to load OpenCV.mk from default install location
9 | 	include $(TOOLCHAIN_PREBUILT_ROOT)/user/share/OpenCV/OpenCV.mk
10 | else
11 | 	include $(OPENCV_MK_PATH)
12 | endif
13 | 
14 | 
15 | LOCAL_MODULE    := headfollower_native
16 | LOCAL_SRC_FILES := jni_part.cpp Detector.cpp
17 | LOCAL_LDLIBS    += -llog -ldl
18 | 
19 | include $(BUILD_SHARED_LIBRARY)
20 | 
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/ClearRenderer.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | import javax.microedition.khronos.egl.EGLConfig;
4 | import javax.microedition.khronos.opengles.GL10;
5 | 
6 | import android.opengl.GLSurfaceView.Renderer;
7 | 
8 | public class ClearRenderer implements Renderer {
9 | 	public void onSurfaceCreated(GL10 gl, EGLConfig config) {
10 | 		// Do nothing special.
11 | 	}
12 | 
13 | 	public void onSurfaceChanged(GL10 gl, int w, int h) {
14 | 		gl.glViewport(0, 0, w, h);
15 | 	}
16 | 
17 | 	public void onDrawFrame(GL10 gl) {
18 | 		gl.glClearColor(0, 0, 0, 1);
19 | 		gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
20 | 	}
21 | }
--------------------------------------------------------------------------------
/README:
--------------------------------------------------------------------------------
1 | Welcome to the source repository of the PoCoMo project
2 | 
3 | High-level details: http://fluid.media.mit.edu/people/roy/current/pocomo.html
4 | 
5 | Prerequisites:
6 | - OpenCV 2.3.1, compiled for Android: http://opencv.willowgarage.com/wiki/Android
7 | - CMake
8 | 
9 | 
10 | To build (very roughly):
11 | - Open Eclipse
12 | - Create a project from existing source, using this directory
13 | - Run $ANDROID_NDK/ndk-build from jni/; this builds the required lib*.so file
14 | - Run via Eclipse
15 | 
16 | 
17 | Note: I am doing my best to move the old codebase into the open-source version of PoCoMo. This means that not all the features mentioned in the paper are implemented in this version yet. Please hang on; I believe there will be another big push around October. Thanks! Roy.
18 | 
19 | To build and run the PC application for testing the tracking algorithm:
20 | cd jni/PCOpenCVTest
21 | cmake . ; make
22 | ./HeadFollower-PCTest
23 | 
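24 | A rough end-to-end example of the native Android build (paths are illustrative;
25 | adjust them to wherever your Android-enabled OpenCV package lives):
26 |     export OPENCV_PACKAGE_DIR=~/opencv-android-package
27 |     cd jni && $ANDROID_NDK/ndk-build    # builds libs/armeabi/libheadfollower_native.so
28 |     # or, for the CMake/SWIG route driven by jni/CMakeLists.txt:
29 |     mkdir build ; cd build ; sh ../cmake_armeabi.sh
30 | 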
--------------------------------------------------------------------------------
/jni/charactertracker.i:
--------------------------------------------------------------------------------
1 | /* File : android-cv.i */
2 | 
3 | %module charactertracker
4 | 
5 | 
6 | /*
7 |  * the java import code must be included for the opencv jni wrappers
8 |  * this means that the android project must reference opencv/android as a project
9 |  * see the default.properties for how this is done
10 |  */
11 | %pragma(java) jniclassimports=%{
12 | import com.opencv.jni.*; //import the android-opencv jni wrappers
13 | %}
14 | 
15 | %pragma(java) jniclasscode=%{
16 | 	static {
17 | 		try {
18 | 			//load the cvcamera library, make sure that libcvcamera.so is in your /libs/armeabi directory
19 | 			//so that the android sdk automatically installs it along with the app.
20 | 
21 | 			//the android-opencv lib must be loaded first in order for the cvcamera
22 | 			//lib to be found
23 | 			//check the apk generated, by opening it in an archive manager, to verify that
24 | 			//both these libraries are present
25 | 			System.loadLibrary("android-opencv");
26 | 			System.loadLibrary("charactertracker");
27 | 		} catch (UnsatisfiedLinkError e) {
28 | 			//badness
29 | 			throw e;
30 | 		}
31 | 	}
32 | 
33 | %}
34 | 
35 | //include the Processor class swig interface file
36 | %include "Detector.i"
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/TransformableImageView.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | import android.content.Context;
4 | import android.graphics.Canvas;
5 | import android.util.AttributeSet;
6 | import android.widget.ImageView;
7 | 
8 | public class TransformableImageView extends ImageView {
9 | 	public TransformableImageView(Context context)
10 | 	{
11 | 		super(context);
12 | 	}
13 | 
14 | 	public TransformableImageView(Context context, AttributeSet attrs)
15 | 	{
16 | 		super(context, attrs);
17 | 	}
18 | 
19 | 	public TransformableImageView(Context context, AttributeSet attrs, int defStyle)
20 | 	{
21 | 		super(context, attrs, defStyle);
22 | 	}
23 | 
24 | 	public float scale = 1.0f;
25 | 	public float angle = 0.0f;
26 | 	public boolean flip = false;
27 | 
28 | 	@Override
29 | 	protected void onDraw(Canvas canvas) {
30 | 		canvas.save();
31 | 		canvas.scale(this.scale, this.scale);
32 | 		canvas.translate(canvas.getWidth()*(1-scale)/2.0f, canvas.getHeight()*(1-scale)/2.0f);
33 | 		canvas.rotate(angle);
34 | 		if(this.flip) {
35 | 			canvas.scale(-1.0f,1.0f);
36 | 			canvas.translate(canvas.getWidth()*-.5f,0);
37 | 		}
38 | 		super.onDraw(canvas);
39 | 		canvas.restore();
40 | 	}
41 | }
42 | 
--------------------------------------------------------------------------------
/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/jni/Detector.i:
--------------------------------------------------------------------------------
1 | /*
2 |  * include the headers required by the generated cpp code
3 |  */
4 | %{
5 | #include "Detector.h"
6 | #include "image_pool.h"
7 | using namespace cv;
8 | %}
9 | 
10 | //import the android-cv.i file so that swig is aware of all that has been previously defined
11 | //notice that it is not an include....
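//  (%import only pulls in the types and typemaps swig needs from the OpenCV
//   interface files; unlike %include, it generates no wrapper code for them)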
12 | %import "android-cv.i"
13 | %import "android-cv-typemaps.i"
14 | 
15 | //make sure to import the image_pool as it is
16 | //referenced by the Processor java generated
17 | //class
18 | %typemap(javaimports) Detector "
19 | import com.opencv.jni.image_pool; // import the image_pool interface for playing nice with
20 |                                   // android-opencv
21 | 
22 | /** Characters Detector */
23 | "
24 | 
25 | class Detector {
26 | public:
27 | 	vector<Point> otherCharacter;
28 | 	vector<Point> selfCharacter;
29 | 
30 | 	Detector() {};
31 | 	virtual ~Detector();
32 | 
33 | 	bool findCharacter(int idx, image_pool* pool, int i_am, bool _flip, bool _debug);
34 | 
35 | 	int getPtX(Point* p) { return p->x;}
36 | 	int getPtY(Point* p) { return p->y;}
37 | 
38 | 	Point getPointFromVector(vector<Point>* v, int idx) { return ((v->size() > idx) ? (*v)[idx] : Point(-1,-1)); }
39 | 
40 | 	double getSizeOfSelf();
41 | 
42 | 	Point getSelfCenter();
43 | 	Point getOtherCenter();
44 | 	float getSelfAngle();
45 | 	int getWaveTimer();
46 | };
47 | 
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/FileFrameProcessor.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | import android.content.Context;
4 | import android.util.AttributeSet;
5 | import android.util.Log;
6 | 
7 | public abstract class FileFrameProcessor extends FrameProcessorBase implements Runnable {
8 | 	private static final String TAG = "FileFrameProcessor";
9 | 	protected String filename = "NO_FILENAME";
10 | 
11 | 	private static int ERROR_FILE_DOESNT_EXIST = -1;
12 | 	private static int ERROR_FRAME_DATA_NULL = -2;
13 | 	private static int ERROR_VIDEOCAPTURE_NOT_OPEN = -3;
14 | 
15 | 	public FileFrameProcessor(Context context, AttributeSet attrs) {
16 | 		super(context, attrs);
17 | 	}
18 | 
19 | 	public FileFrameProcessor(Context context) {
20 | 		super(context);
21 | 	}
22 | 
23 | 	@Override
24 | 	protected void init() {
25 | 		Log.i(TAG,"init()");
26 | 		int[] ret = OpenFromFile(filename);
27 | 		if(ret[0] < 0) {
28 | 			if(ret[0] == ERROR_FILE_DOESNT_EXIST)
29 | 				throw new RuntimeException("Shit, file doesn't exist");
30 | 			if(ret[0] == ERROR_FRAME_DATA_NULL)
31 | 				throw new RuntimeException("Shit, file was loaded incorrectly and the frame data is null!");
32 | 			if(ret[0] == ERROR_VIDEOCAPTURE_NOT_OPEN)
33 | 				throw new RuntimeException("Shit, file was loaded incorrectly and now it can't be opened");
34 | 		}
35 | 		mFrameWidth = ret[0];
36 | 		mFrameHeight = ret[1];
37 | 
38 | 		(new Thread(this)).start(); //start the FrameProcessorBase thread..
39 | 
40 | 		//start the thread to periodically load a frame from the file
41 | 		Thread t = new Thread(new Runnable() {
42 | 			@Override
43 | 			public void run() {
44 | 				while(FileFrameProcessor.this.mThreadRun) { //TODO: when to stop??
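					//pacing loop: each notify() below wakes the FrameProcessorBase
					//thread, which calls processFrame() on the current frame; the
					//~30ms sleep plays the recorded file back at roughly 33 fps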
45 | 					synchronized (FileFrameProcessor.this) {
46 | 						FileFrameProcessor.this.notify(); //this will let FrameProcessorBase fire "processFrame"
47 | 					}
48 | 					try {
49 | 						Thread.sleep(30);
50 | 					} catch (InterruptedException e) {
51 | 						Log.e(TAG,"interrupted",e);
52 | 					}
53 | 				}
54 | 			}
55 | 		});
56 | 		t.start();
57 | 	}
58 | 
59 | 	public native int[] OpenFromFile(String filename);
60 | }
61 | 
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/BitmapDrawerSurfaceView.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | import java.util.concurrent.Semaphore;
4 | 
5 | import android.content.Context;
6 | import android.graphics.Bitmap;
7 | import android.graphics.Canvas;
8 | import android.graphics.Paint;
9 | import android.util.AttributeSet;
10 | import android.util.Log;
11 | import android.view.SurfaceHolder;
12 | import android.view.SurfaceView;
13 | 
14 | public class BitmapDrawerSurfaceView extends SurfaceView implements IBitmapHolder, SurfaceHolder.Callback {
15 | 	private static final String TAG = "BitmapDrawerSurfaceView";
16 | 	Bitmap bmp;
17 | 	private SurfaceHolder mHolder;
18 | 	private Paint paint;
19 | 	Semaphore bmp_mutex = new Semaphore(1);
20 | 
21 | 	public BitmapDrawerSurfaceView(Context context, AttributeSet attrs) {
22 | 		super(context, attrs);
23 | 		mHolder = getHolder();
24 | 		mHolder.addCallback(this);
25 | 		paint = new Paint();
26 | 	}
27 | 
28 | 	/* (non-Javadoc)
29 | 	 * @see edu.mit.media.fluid.royshil.headfollower.BitmapHolder#getBmp()
30 | 	 */
31 | 	@Override
32 | 	public Bitmap getBmp() {
33 | 		return bmp;
34 | 	}
35 | 
36 | 	/* (non-Javadoc)
37 | 	 * @see edu.mit.media.fluid.royshil.headfollower.BitmapHolder#setBmp(android.graphics.Bitmap)
38 | 	 */
39 | 	@Override
40 | 	public void setBmp(Bitmap bmp) {
41 | 		try {
42 | 			bmp_mutex.acquire();
43 | 			try {
44 | 				if(this.bmp != null) {
45 | 					this.bmp.recycle();
46 | 					this.bmp = null;
47 | 				}
48 | 
49 | 				this.bmp = bmp;
50 | 				this.postInvalidate();
51 | 			} finally {
52 | 				bmp_mutex.release(); //release on every path
53 | 			}
54 | 		} catch (InterruptedException e) {
55 | 			Log.e(TAG,"couldn't acquire bmp lock");
56 | 		}
57 | 	}
58 | 
59 | 	@Override
60 | 	public void surfaceChanged(SurfaceHolder holder, int format, int width,
61 | 			int height) {
62 | 		Log.i(TAG,"SurfaceChanged");
63 | 	}
64 | 
65 | 	@Override
66 | 	public void surfaceCreated(SurfaceHolder holder) {
67 | 		Log.i(TAG,"SurfaceCreated");
68 | 	}
69 | 
70 | 	@Override
71 | 	public void surfaceDestroyed(SurfaceHolder holder) {
72 | 		Log.i(TAG,"SurfaceDestroyed");
73 | 	}
74 | 
75 | 	@Override
76 | 	protected void onDraw(Canvas canvas) {
77 | 		Log.v(TAG, "OnDraw");
78 | 		if(bmp == null) return;
79 | 		super.onDraw(canvas);
80 | 		try {
81 | 			bmp_mutex.acquire();
82 | 			try {
83 | 				if(bmp.isRecycled()) return; //early return used to leak the semaphore
84 | 				canvas.drawBitmap(bmp, 0,0, paint);
85 | 			} finally {
86 | 				bmp_mutex.release();
87 | 			}
88 | 		} catch (InterruptedException e) {
89 | 			Log.e(TAG,"couldn't acquire bmp lock");
90 | 		}
91 | 	}
92 | }
--------------------------------------------------------------------------------
/src/edu/mit/media/fluid/royshil/headfollower/CharacterRenderer.java:
--------------------------------------------------------------------------------
1 | package edu.mit.media.fluid.royshil.headfollower;
2 | 
3 | import javax.microedition.khronos.egl.EGLConfig;
4 | import javax.microedition.khronos.opengles.GL10;
5 | 
6 | import android.opengl.GLSurfaceView.Renderer;
7 | 
8 | public class CharacterRenderer implements Renderer {
9 | 
10 | 	@Override
11 | 	public void onDrawFrame(GL10 gl) {
12 | 		/*
13 | 		 *
Usually, the first thing one might want to do is to clear 14 | * the screen. The most efficient way of doing this is to use 15 | * glClear(). 16 | */ 17 | 18 | gl.glClearColor(1.0f, 0.0f, 0.0f, 1.0f); 19 | gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT); 20 | 21 | /* 22 | * Now we're ready to draw some 3D objects 23 | */ 24 | 25 | // gl.glMatrixMode(GL10.GL_MODELVIEW); 26 | // gl.glLoadIdentity(); 27 | // gl.glTranslatef(0, 0, -3.0f); 28 | // gl.glRotatef(mAngle, 0, 1, 0); 29 | // gl.glRotatef(mAngle*0.25f, 1, 0, 0); 30 | // 31 | // gl.glEnableClientState(GL10.GL_VERTEX_ARRAY); 32 | // gl.glEnableClientState(GL10.GL_COLOR_ARRAY); 33 | // 34 | // mCube.draw(gl); 35 | // 36 | // gl.glRotatef(mAngle*2.0f, 0, 1, 1); 37 | // gl.glTranslatef(0.5f, 0.5f, 0.5f); 38 | // 39 | // mCube.draw(gl); 40 | 41 | } 42 | 43 | public void onSurfaceChanged(GL10 gl, int width, int height) { 44 | gl.glViewport(0, 0, width, height); 45 | 46 | /* 47 | * Set our projection matrix. This doesn't have to be done 48 | * each time we draw, but usually a new projection needs to 49 | * be set when the viewport is resized. 50 | */ 51 | 52 | float ratio = (float) width / height; 53 | gl.glMatrixMode(GL10.GL_PROJECTION); 54 | gl.glLoadIdentity(); 55 | gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10); 56 | } 57 | 58 | public void onSurfaceCreated(GL10 gl, EGLConfig config) { 59 | /* 60 | * By default, OpenGL enables features that improve quality 61 | * but reduce performance. One might want to tweak that 62 | * especially on software renderer. 63 | */ 64 | gl.glDisable(GL10.GL_DITHER); 65 | 66 | /* 67 | * Some one-time OpenGL initialization can be made here 68 | * probably based on features of this particular context 69 | */ 70 | gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, 71 | GL10.GL_FASTEST); 72 | 73 | if (mTranslucentBackground) { 74 | gl.glClearColor(0,0,0,0); 75 | } else { 76 | gl.glClearColor(1,1,1,1); 77 | } 78 | gl.glEnable(GL10.GL_CULL_FACE); 79 | gl.glShadeModel(GL10.GL_SMOOTH); 80 | gl.glEnable(GL10.GL_DEPTH_TEST); 81 | } 82 | private boolean mTranslucentBackground; 83 | } 84 | -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/graphics/MyAnimations.java: -------------------------------------------------------------------------------- 1 | package edu.mit.media.fluid.royshil.graphics; 2 | 3 | import java.util.HashMap; 4 | 5 | public class MyAnimations { 6 | public class MyAnim { 7 | public String filename; 8 | public int start; 9 | public int end; 10 | public boolean loop; 11 | public MyAnim(String filename, int start, int end) { 12 | super(); 13 | this.filename = filename; 14 | this.start = start; 15 | this.end = end; 16 | this.loop = false; 17 | } 18 | public MyAnim(String filename, int start, int end, boolean loop) { 19 | super(); 20 | this.filename = filename; 21 | this.start = start; 22 | this.end = end; 23 | this.loop = loop; 24 | } 25 | } 26 | 27 | public enum Animations { 28 | TURN, 29 | THREE_QUARTERS_TO_PROFILE, 30 | START_WALK, 31 | END_WALK, 32 | WALK, 33 | SHAKE_HAND, 34 | WAVE, NATURAL 35 | } 36 | 37 | public enum Character { 38 | BLUE, 39 | RED 40 | } 41 | 42 | private static HashMap blue_animation_index = new HashMap(); 43 | private static HashMap red_animation_index = new HashMap(); 44 | private static MyAnimations myAnimations = new MyAnimations(); 45 | 46 | static { 47 | blue_animation_index.put(Animations.TURN, myAnimations.new MyAnim("guy/guy_",329,341)); 48 | // 
blue_animation_index.put(Animations.THREE_QUARTERS_TO_PROFILE, R.drawable.anim_look_l_to_r); 49 | blue_animation_index.put(Animations.START_WALK, myAnimations.new MyAnim("guy/guy_",37,57)); 50 | blue_animation_index.put(Animations.END_WALK, myAnimations.new MyAnim("guy/guy_",90,111)); 51 | blue_animation_index.put(Animations.WALK, myAnimations.new MyAnim("guy/guy_",58,89,true)); 52 | blue_animation_index.put(Animations.WAVE, myAnimations.new MyAnim("guy/guy_",113,190)); 53 | blue_animation_index.put(Animations.SHAKE_HAND, myAnimations.new MyAnim("guy/guy_",266,325)); 54 | blue_animation_index.put(Animations.NATURAL, myAnimations.new MyAnim("guynatural3q.png",-1,-1)); 55 | 56 | red_animation_index.put(Animations.TURN, myAnimations.new MyAnim("girl/girl_",301,323)); 57 | // red_animation_index.put(Animations.THREE_QUARTERS_TO_PROFILE, R.drawable.anim_look_l_to_r); 58 | red_animation_index.put(Animations.START_WALK, myAnimations.new MyAnim("girl/girl_",45,63)); 59 | red_animation_index.put(Animations.END_WALK, myAnimations.new MyAnim("girl/girl_",97,112)); 60 | red_animation_index.put(Animations.WALK, myAnimations.new MyAnim("girl/girl_",64,96,true)); 61 | red_animation_index.put(Animations.WAVE, myAnimations.new MyAnim("girl/girl_",242,300)); 62 | red_animation_index.put(Animations.SHAKE_HAND, myAnimations.new MyAnim("girl/girl_",160,235)); 63 | red_animation_index.put(Animations.NATURAL, myAnimations.new MyAnim("girl/girl_0035.png",-1,-1)); 64 | } 65 | 66 | public static MyAnim getAnimation(Animations a, Character c) { 67 | MyAnim myAnim = (c == Character.BLUE) ? blue_animation_index.get(a) : red_animation_index.get(a); 68 | return myAnim; 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/headfollower/FrameProcessorBase.java: -------------------------------------------------------------------------------- 1 | package edu.mit.media.fluid.royshil.headfollower; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.util.AttributeSet; 6 | import android.util.Log; 7 | import android.view.SurfaceHolder; 8 | import android.view.SurfaceView; 9 | 10 | public abstract class FrameProcessorBase extends SurfaceView implements Runnable, SurfaceHolder.Callback { 11 | private static final String TAG = "FrameProcessorBase"; 12 | 13 | protected boolean mThreadRun; 14 | private IBitmapHolder mBitmapHolder; 15 | byte[] mFrame; 16 | protected int mFrameWidth; 17 | protected int mFrameHeight; 18 | protected ICharacterStateHandler mStateHandler; 19 | 20 | public ICharacterStateHandler getmStateHandler() { 21 | return mStateHandler; 22 | } 23 | 24 | public void setmStateHandler(ICharacterStateHandler mStateHandler) { 25 | this.mStateHandler = mStateHandler; 26 | } 27 | 28 | public FrameProcessorBase(Context context) { 29 | super(context); 30 | } 31 | 32 | public FrameProcessorBase(Context context, AttributeSet attrs) { 33 | super(context, attrs); 34 | } 35 | 36 | public FrameProcessorBase(Context context, AttributeSet attrs, int defStyle) { 37 | super(context, attrs, defStyle); 38 | } 39 | 40 | protected abstract Bitmap processFrame(byte[] data); 41 | protected abstract void init(); 42 | 43 | public void run() { 44 | mThreadRun = true; 45 | Log.i(TAG, "Starting processing thread"); 46 | while (mThreadRun) { 47 | Bitmap bmp = null; 48 | 49 | synchronized (this) { 50 | try { 51 | this.wait(); 52 | bmp = processFrame(mFrame); 53 | } catch (InterruptedException e) { 54 | e.printStackTrace(); 55 | 
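				//wait()/notify() hand-off: a producer (the camera preview callback in
				//CameraFrameProcessor, or FileFrameProcessor's file-reader thread) sets
				//mFrame and calls notify(); processFrame() then runs on this thread,
				//keeping the heavy native work off the producer's thread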
} 56 | } 57 | 58 | if (bmp != null && mBitmapHolder != null) { 59 | // Canvas canvas = mDrawHolder.lockCanvas(); 60 | // if (canvas != null) { 61 | // canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) 62 | // / 2, (canvas.getHeight() - getFrameHeight()) / 2, null); 63 | // mDrawHolder.unlockCanvasAndPost(canvas); 64 | // } 65 | // bmp.recycle(); 66 | synchronized (bmp) { 67 | mBitmapHolder.setBmp(bmp); 68 | } 69 | } 70 | } 71 | } 72 | 73 | public IBitmapHolder getBitmapHolder() { 74 | return mBitmapHolder; 75 | } 76 | 77 | public void setBitmapHolder(IBitmapHolder mDrawHolder) { 78 | this.mBitmapHolder = mDrawHolder; 79 | } 80 | 81 | public int getFrameWidth() { 82 | return mFrameWidth; 83 | } 84 | 85 | public int getFrameHeight() { 86 | return mFrameHeight; 87 | } 88 | 89 | public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { 90 | Log.i(TAG, "surfaceChanged"); 91 | } 92 | 93 | public void surfaceCreated(SurfaceHolder holder) { 94 | Log.i(TAG, "surfaceCreated"); 95 | } 96 | 97 | public void surfaceDestroyed(SurfaceHolder holder) { 98 | Log.i(TAG,"surfaceDestroyed"); 99 | mThreadRun = false; 100 | } 101 | } -------------------------------------------------------------------------------- /jni/PCOpenCVTest/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include 4 | #include "Detector.h" 5 | 6 | Mat img; 7 | 8 | void onMouse( int event, int x, int y, int, void* ) { 9 | cout << "mouse vent" << endl; 10 | if( event != CV_EVENT_LBUTTONUP ) 11 | return; 12 | 13 | Point p(x,y); 14 | // 15 | // 16 | // Mat img_roi = img(Rect(p.x-5,p.y-5,10,10)); 17 | // Mat hsv_; cvtColor(img_roi, hsv_, CV_BGR2HSV); 18 | // Scalar mean_ = mean(hsv_); 19 | // stringstream ss; ss << mean_.val[0] << "," << mean_.val[1] << "," << mean_.val[2]; 20 | stringstream ss; ss< state(1); 41 | state[0] = CALIBRATE_NO_MARKERS_FOUND; 42 | 43 | vc.open("video-2011-09-11-20-35-26.3gp"); 44 | int frame_index = 1; 45 | while (vc.isOpened()/* frame_index < 100*/) { 46 | vc >> frame; 47 | // stringstream ss; ss<<"/tmp/from_device/frame"<(2); 67 | d.selfCharacter[0] = Point(120,145); 68 | d.selfCharacter[1] = Point(170,245); 69 | 70 | d.findCharacter(img, IAM_RED, true, true); 71 | // vector o_t = d.otherCharacter; 72 | // if (o_t.size() > 1) { 73 | // float a = 0.2; 74 | // Mat trans = (Mat_(2,2) << d.selfCharacter[0].x,d.selfCharacter[0].y,d.selfCharacter[0].x,d.selfCharacter[0].y); 75 | //// Mat translated = (Mat_(2,2) << o_t[0].x,o_t[0].y,o_t[1].x,o_t[1].y) - trans; 76 | // Mat o_t_m; Mat(o_t).reshape(1,2).convertTo(o_t_m,CV_32F); 77 | // Mat translated = o_t_m - trans; 78 | // Mat oo_t_m = Mat(o_t).reshape(1,2); 79 | // translated.convertTo(oo_t_m, CV_32S); 80 | // Mat rot_mat = (Mat_(2,2) << cos(a) , -sin(a), sin(a), cos(a)); 81 | // Mat rotated = translated * rot_mat; 82 | // rotated.convertTo(oo_t_m, CV_32S); 83 | // rotated = rotated + trans; 84 | // rotated.convertTo(oo_t_m, CV_32S); 85 | // 86 | // line(img, d.selfCharacter[0], d.selfCharacter[1], Scalar(0,255), 2); 87 | // line(img, d.otherCharacter[0], d.otherCharacter[1], Scalar(255,255), 2); 88 | // line(img, o_t[0], o_t[1], Scalar(0,255,255), 3); 89 | // 90 | // } 91 | 92 | 93 | // } 94 | 95 | // if (d.otherCharacter.size()>=2) { 96 | // line(img, d.otherCharacter[0], d.otherCharacter[1], Scalar(255,0,0), 2); 97 | // } 98 | // if (d.selfCharacter.size()>=2) { 99 | // line(img, d.selfCharacter[0], d.selfCharacter[1], Scalar(0,255,0), 2); 100 | // } 101 | 102 | { 103 
| double sz_of_self = d.getSizeOfSelf(); 104 | stringstream ss; ss << "sz " << sz_of_self; 105 | putText(img, ss.str(), d.getSelfCenter(), CV_FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1); 106 | } 107 | 108 | imshow("temp",img); 109 | // vw.write(img); 110 | 111 | 112 | int c = waitKey(30); 113 | if(c==' ') waitKey(0); 114 | if(c==27) break; 115 | } 116 | 117 | return 0; 118 | } 119 | -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/headfollower/CameraFrameProcessor.java: -------------------------------------------------------------------------------- 1 | package edu.mit.media.fluid.royshil.headfollower; 2 | 3 | import java.io.IOException; 4 | import java.util.List; 5 | 6 | import android.content.Context; 7 | import android.hardware.Camera; 8 | import android.hardware.Camera.PreviewCallback; 9 | import android.util.AttributeSet; 10 | import android.util.Log; 11 | import android.view.SurfaceHolder; 12 | 13 | public abstract class CameraFrameProcessor extends FrameProcessorBase { 14 | static final String TAG = "CameraFrameProcessor"; 15 | 16 | protected Camera mCamera; 17 | private SurfaceHolder mHolder; 18 | 19 | public CameraFrameProcessor(Context context, AttributeSet attrs) { 20 | super(context, attrs); 21 | init(); 22 | } 23 | 24 | public CameraFrameProcessor(Context context) { 25 | super(context); 26 | init(); 27 | } 28 | 29 | @Override 30 | protected void init() { 31 | mHolder = getHolder(); 32 | mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 33 | mHolder.addCallback(this); 34 | Log.i(TAG, "Instantiated new " + this.getClass()); 35 | } 36 | 37 | public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { 38 | Log.i(TAG, "surfaceChanged"); 39 | if(mCamera == null) initCamera(mHolder); 40 | if (mCamera != null) { 41 | Camera.Parameters params = mCamera.getParameters(); 42 | List sizes = params.getSupportedPreviewSizes(); 43 | mFrameWidth = width; 44 | mFrameHeight = height; 45 | 46 | // selecting optimal camera preview size 47 | { 48 | double minDiff = Double.MAX_VALUE; 49 | for (Camera.Size size : sizes) { 50 | if (Math.abs(size.height - height) < minDiff) { 51 | mFrameWidth = size.width; 52 | mFrameHeight = size.height; 53 | minDiff = Math.abs(size.height - height); 54 | } 55 | } 56 | } 57 | 58 | params.setPreviewSize(getFrameWidth(), getFrameHeight()); 59 | mCamera.setParameters(params); 60 | mCamera.startPreview(); 61 | } 62 | } 63 | 64 | public void initCamera(SurfaceHolder holder) { 65 | mCamera = Camera.open(); 66 | mCamera.setErrorCallback(new Camera.ErrorCallback() { 67 | public void onError(int error, Camera camera) { 68 | Log.i(TAG,"ErrorCallback,"+error); 69 | } 70 | }); 71 | mCamera.setOneShotPreviewCallback(new PreviewCallback() { 72 | public void onPreviewFrame(byte[] data, Camera camera) { 73 | Log.i(TAG,"OneShotPreviewCallback"); 74 | } 75 | }); 76 | mCamera.setPreviewCallback(new PreviewCallback() { 77 | public void onPreviewFrame(byte[] data, Camera camera) { 78 | Log.i(TAG, "got frame"); 79 | synchronized (CameraFrameProcessor.this) { 80 | mFrame = data; 81 | CameraFrameProcessor.this.notify(); 82 | } 83 | } 84 | }); 85 | try { 86 | mCamera.setPreviewDisplay(holder); 87 | } catch (IOException e) { 88 | Log.e(TAG, "mCamera.setPreviewDisplay fails: " + e); 89 | } 90 | (new Thread(this)).start(); 91 | } 92 | 93 | public void surfaceDestroyed(SurfaceHolder holder) { 94 | super.surfaceDestroyed(holder); 95 | Log.i(TAG, "surfaceDestroyed"); 96 | mThreadRun = false; 97 | if 
(mCamera != null) { 98 | synchronized (this) { 99 | mCamera.stopPreview(); 100 | mCamera.setPreviewCallback(null); 101 | mCamera.release(); 102 | mCamera = null; 103 | } 104 | } 105 | } 106 | 107 | } -------------------------------------------------------------------------------- /jni/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | ######################################################### 2 | # Find opencv and android-opencv 3 | ######################################################### 4 | 5 | #set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../../build 6 | # CACHE PATH "The path where you built opencv for android") 7 | #set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build_neon 8 | # CACHE PATH "The path where you built android-opencv") 9 | 10 | if( ARMEABI_V7A ) 11 | if ( NEON ) 12 | set(OpenCV_DIR $ENV{OPENCV_PACKAGE_DIR}/armeabi-v7a-neon/share/opencv/) 13 | else() 14 | set(OpenCV_DIR $ENV{OPENCV_PACKAGE_DIR}/armeabi-v7a/share/opencv/) 15 | endif() 16 | else() 17 | set(OpenCV_DIR $ENV{OPENCV_PACKAGE_DIR}/armeabi/share/opencv/) 18 | endif() 19 | 20 | 21 | find_package(OpenCV REQUIRED NO_DEFAULT_PATH PATHS ${OpenCV_DIR}) 22 | message("OpenCV dir: " ${OpenCV_DIR}) 23 | 24 | if( ARMEABI_V7A ) 25 | set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build_neon/) 26 | else() 27 | set(AndroidOpenCV_DIR $ENV{HOME}/Downloads/trunk/opencv/android/android-opencv/build_armeabi) 28 | endif() 29 | 30 | message ("android-opencv dir " ${AndroidOpenCV_DIR}) 31 | FIND_PACKAGE(AndroidOpenCV REQUIRED PATHS(${AndroidOpenCV_DIR}) ) 32 | message("android-opencv lib-dir: " ${AndroidOpenCV_LIB_DIR}) 33 | 34 | 35 | ######################################################### 36 | #c flags, included, and lib dependencies 37 | ######################################################### 38 | 39 | #notice the "recycling" of CMAKE_C_FLAGS 40 | #this is necessary to pick up android flags 41 | set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" ) 42 | 43 | INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}) 44 | 45 | message("OpenCV libs: " ${OpenCV_LIBS} ) 46 | 47 | set( MY_OpenCV_LIBS 48 | # debug opencv_contrib 49 | # optimized opencv_contrib 50 | # debug opencv_calib3d 51 | # optimized opencv_calib3d 52 | # debug opencv_objdetect 53 | # optimized opencv_objdetect 54 | debug opencv_features2d 55 | optimized opencv_features2d 56 | debug opencv_imgproc 57 | optimized opencv_imgproc 58 | debug opencv_video 59 | optimized opencv_video 60 | debug opencv_highgui 61 | optimized opencv_highgui 62 | # debug opencv_ml 63 | # optimized opencv_ml 64 | # debug opencv_legacy 65 | # optimized opencv_legacy 66 | # debug opencv_flann 67 | # optimized opencv_flann 68 | debug opencv_core 69 | optimized opencv_core 70 | # debug opencv_androidcamera 71 | # optimized opencv_androidcamera 72 | debug libjpeg 73 | optimized libjpeg 74 | debug libpng 75 | optimized libpng 76 | debug libtiff 77 | optimized libtiff 78 | debug libjasper 79 | optimized libjasper 80 | debug zlib 81 | optimized zlib 82 | ) 83 | 84 | set( LIBRARY_DEPS ${AndroidOpenCV_LIBS} ${MY_OpenCV_LIBS} ) 85 | if(ANDROID) 86 | set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl) 87 | endif(ANDROID) 88 | 89 | message("library deps: " ${LIBRARY_DEPS}) 90 | 91 | ######################################################### 92 | #SWIG STUFF 93 | ######################################################### 94 | #the java package to place swig generated java files in 95 | set(MY_PACKAGE edu.mit.media.fluid.charactertracker.jni) 96 | 97 
| if(NOT ANDROID) 98 | #non android swig and jni 99 | #jni is available by default on android 100 | find_package(JNI REQUIRED) 101 | include_directories(${JNI_INCLUDE_DIRS}) 102 | FIND_PACKAGE(SWIG) 103 | endif() 104 | 105 | INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain 106 | 107 | if(ANDROID) 108 | #this will set the output path for the java package 109 | #and properly create the package declarations in generated java sources 110 | SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain 111 | endif(ANDROID) 112 | 113 | #this add's the swig path for the opencv wrappers 114 | SET(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} "-I${AndroidOpenCV_SWIG_DIR}" ) 115 | 116 | SET_SOURCE_FILES_PROPERTIES(charactertracker.i PROPERTIES CPLUSPLUS ON) 117 | 118 | #add the swig module, giving it the name, java, and then all of the source files 119 | SWIG_ADD_MODULE(charactertracker java 120 | charactertracker.i #swig file 121 | Detector.cpp #cpp files can be compiled to 122 | ) 123 | 124 | #link the module like any other 125 | target_link_libraries(charactertracker ${LIBRARY_DEPS} ) 126 | -------------------------------------------------------------------------------- /jni/Detector.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef DETECTOR_H_ 3 | #define DETECTOR_H_ 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | //#include 10 | //#include 11 | #include 12 | 13 | #include 14 | using namespace std; 15 | 16 | using namespace cv; 17 | 18 | #define IAM_BLUE 1 19 | #define IAM_RED 2 20 | 21 | #define CALIBRATE_NO_MARKERS_FOUND 0 22 | #define CALIBRATE_SEND_EXTRA_MARKER 1 23 | #define CALIBRATE_FOUND 2 24 | #define CALIBRATE_NO_EXTRA_MARKER_FOUND 3 25 | 26 | #define DETECTOR_EPSILON 0.05 27 | #define DETECTOR_TIGHT_EPSILON 0.01 28 | 29 | #define Point2Vec2f(p) Vec2f((p).x,(p).y) 30 | #define Vec2f2Point(v) Point((v)[0],(v)[1]) 31 | 32 | class Detector { 33 | 34 | // vector points,nextPoints; 35 | // vector ptsstatus; 36 | // vector ptserror; 37 | // bool gatherPoints; 38 | // long framecount; 39 | // Mat prevgray; 40 | // VideoWriter writer; 41 | 42 | 43 | int waveTimer; 44 | 45 | Scalar blueHSVThresh, redHSVThresh; 46 | 47 | //Meanshift tracker 48 | int trackObject; 49 | int hsize[3]; 50 | float hranges[2],sranges[2],vranges[2]; 51 | const float* phranges[3]; 52 | Mat hist; 53 | Mat trackMask; 54 | Mat hue; 55 | Mat histimg; 56 | Mat backproj; 57 | bool other_extra_marker_found; 58 | bool kalman_setup; 59 | 60 | 61 | Mat _img; 62 | Mat img; 63 | Mat gray; 64 | Mat hsv; 65 | 66 | //Kalman filter 67 | KalmanFilter KF[2]; 68 | Mat_ state; 69 | Mat processNoise; 70 | Mat_ measurement; 71 | 72 | int calibration_state; 73 | int look_for_extra_marker_count; 74 | 75 | vector > calib_history; 76 | vector circlepts; 77 | 78 | public: 79 | bool shouldResize; 80 | bool tracking; 81 | vector otherCharacter; 82 | vector selfCharacter; 83 | float character_to_world_ang; 84 | 85 | Detector():waveTimer(0), 86 | trackObject(-1), 87 | kalman_setup(false), 88 | calibration_state(CALIBRATE_NO_MARKERS_FOUND), 89 | look_for_extra_marker_count(0), 90 | shouldResize(false) 91 | { 92 | //Setup Meanshift tracker 93 | hranges[0] = 0; hranges[1] = 180; 94 | sranges[0] = 0; sranges[1] = 255; 95 | vranges[0] = 0; vranges[1] = 255; 96 | phranges[0] = hranges; 97 | phranges[1] = sranges; 98 | phranges[2] = vranges; 99 | hsize[0] = 20; hsize[1] = 28; hsize[2] = 28; 100 | 101 | ellipse2Poly(Point(0,0), Size(10,10),0,0,360,6,circlepts); 102 | 103 
| #ifdef _PC_COMPILE 104 | histimg = Mat::zeros(200, 320, CV_8UC3); 105 | #endif 106 | }; 107 | ~Detector() {}; 108 | 109 | void setupKalmanFilter() { 110 | //Setup Kalman Filter 111 | for (int i=0; i<2; i++) { 112 | KF[i] = KalmanFilter(4, 2, 0); 113 | state = Mat_(4, 1); // (x, y, Vx, Vy) 114 | processNoise = Mat(4, 1, CV_32F); 115 | measurement = (Mat_(2,1) << 0 , 0); 116 | setIdentity(KF[i].measurementMatrix); 117 | setIdentity(KF[i].processNoiseCov, Scalar::all(1e-4)); 118 | setIdentity(KF[i].measurementNoiseCov, Scalar::all(1e-4)); 119 | setIdentity(KF[i].errorCovPost, Scalar::all(.1)); 120 | 121 | KF[i].transitionMatrix = *(Mat_(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1); 122 | } 123 | kalman_setup = true; 124 | } 125 | 126 | vector GetPointsUsingBlobs(vector& _points, Mat& img, Mat& hsv, bool get_all_blobs, int i_am, bool _debug); 127 | 128 | vector findCharacter(Mat& img, int i_am, bool _flip, bool _debug); 129 | 130 | vector calibrateSelfCharacter(Mat& img, int i_am, bool _flip, bool _debug); 131 | vector calibrateOtherCharacter(Mat& img, int i_am, bool _flip, bool _debug); 132 | 133 | void TrackPoints(Rect markers[], bool _debug); 134 | void KalmanSmooth(); 135 | 136 | int getPtX(Point* p) { return p->x;} 137 | int getPtY(Point* p) { return p->y;} 138 | 139 | Point getPointFromVector(vector* v, int idx) { return ((v->size() > idx) ? (*v)[idx] : Point(-1,-1)); } 140 | 141 | double getSizeOfSelf(); 142 | 143 | Point getSelfCenter() { if(selfCharacter.size()>=2) { return (selfCharacter[0]+selfCharacter[1])*.5; } else { return Point(-1,-1); } } 144 | Point getOtherCenter() { if(otherCharacter.size()>=2) { return (otherCharacter[0]+otherCharacter[1])*.5; } else { return Point(-1,-1); } } 145 | float getSelfAngle() { return 1.0; } 146 | int getWaveTimer() { return waveTimer; } 147 | bool FindExtraMarker(vector& pts); 148 | bool FindExtraMarkerUsingBlobs(int i_am); 149 | 150 | void setCalibrationState(int state) { calibration_state = state;} 151 | 152 | void setupImages(Mat& _img, bool _flip) { 153 | #ifndef _PC_COMPILE 154 | if(shouldResize) 155 | resize(_img,img,Size(),0.5,0.5); 156 | else 157 | _img.copyTo(img); 158 | 159 | //rotate 90 degrees CCW 160 | double angle = -90.0; 161 | Point2f src_center(img.rows/2.0, img.rows/2.0); 162 | Mat rot_mat = getRotationMatrix2D(src_center, angle, 1.0); 163 | Mat dst; 164 | warpAffine(img, dst, rot_mat, Size(img.rows,img.cols)); 165 | if(_flip) flip(dst,dst,0); 166 | dst.copyTo(img); 167 | #else 168 | _img.copyTo(img); 169 | #endif 170 | 171 | // cvtColor(img, img, CV_RGB2BGR); 172 | // cvtColor(img, gray, CV_RGB2GRAY); 173 | cvtColor(img, hsv, CV_BGR2HSV); 174 | 175 | // if(!hue.data) 176 | // hue.create(hsv.size(), hsv.depth()); 177 | // int ch[] = {0, 0}; 178 | // mixChannels(&hsv, 1, &hue, 1, ch, 1); //prepare hue data for extra marker 179 | } 180 | 181 | }; 182 | 183 | #endif -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/headfollower/CharacterTrackerView.java: -------------------------------------------------------------------------------- 1 | package edu.mit.media.fluid.royshil.headfollower; 2 | 3 | import java.util.Arrays; 4 | import java.util.EnumSet; 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | import java.util.concurrent.Semaphore; 8 | 9 | import android.content.Context; 10 | import android.graphics.Bitmap; 11 | import android.util.AttributeSet; 12 | import android.util.Log; 13 | 14 | public class CharacterTrackerView extends CameraFrameProcessor { 15 
| 16 | private static final String TAG = "CharacterTrackerView"; 17 | private boolean debug = false; 18 | private boolean flip = false; 19 | private int i_am = 2; 20 | 21 | public enum State 22 | { 23 | CALIBRATING_NO_MARKERS_FOUND(0), 24 | CALIBRATING_SHOW_MARKER(1), 25 | WORKING(2), 26 | CALIBRTING_NO_EXTRA_MARKER_FOUND(3), 27 | NO_TRACKING(99); 28 | 29 | private static final Map lookup = new HashMap(); 30 | static { 31 | for(State s : EnumSet.allOf(State.class)) lookup.put(s.getCode(), s); 32 | } 33 | private int code; 34 | private State(int code) { this.code = code; } 35 | public int getCode() { return code; } 36 | public static State get(int code) { return lookup.get(code); } 37 | } 38 | 39 | private State currentState = State.CALIBRATING_NO_MARKERS_FOUND; 40 | private Semaphore currentStateLock = new Semaphore(1); 41 | private IMarkerShower mMarkerShower; 42 | 43 | 44 | public CharacterTrackerView(Context context, AttributeSet attrs) { 45 | super(context, attrs); 46 | // filename = "/sdcard/DCIM/Camera/video-2011-09-11-20-35-26.avi"; 47 | // filename = "/sdcard/video/frame"; 48 | init(); 49 | Log.i(TAG,"new instance"); 50 | } 51 | 52 | public CharacterTrackerView(Context context) { 53 | super(context); 54 | init(); 55 | Log.i(TAG,"new instance"); 56 | } 57 | 58 | @Override 59 | protected void init() { 60 | super.init(); 61 | ResetFrameIndex(); 62 | } 63 | 64 | @Override 65 | protected Bitmap processFrame(byte[] data) { 66 | Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888); 67 | 68 | try { 69 | currentStateLock.acquire(); 70 | } catch (InterruptedException e1) { 71 | Log.e(TAG,"interrupted on currentStateLock.acquire()"); 72 | return bmp; 73 | } 74 | 75 | if(currentState == State.NO_TRACKING) { 76 | return bmp; //skip everything 77 | } 78 | 79 | int frameSize = getFrameWidth() * getFrameHeight(); 80 | int[] rgba = new int[frameSize]; 81 | 82 | if(currentState == State.CALIBRATING_NO_MARKERS_FOUND || currentState == State.CALIBRTING_NO_EXTRA_MARKER_FOUND || currentState == State.CALIBRATING_SHOW_MARKER) { 83 | if(currentState == State.CALIBRATING_SHOW_MARKER || currentState == State.CALIBRTING_NO_EXTRA_MARKER_FOUND) { 84 | Log.i(TAG,"Show marker"); 85 | mMarkerShower.showMarker(); 86 | } else if (currentState == State.CALIBRATING_NO_MARKERS_FOUND) { 87 | Log.i(TAG,"Calibrating..."); 88 | mMarkerShower.removeMarker(); 89 | } 90 | try { 91 | Thread.sleep(100); //sleep a little to let the marker show up / clear out 92 | } catch (InterruptedException e) { 93 | e.printStackTrace(); 94 | } 95 | 96 | // WriteFrame(getFrameWidth(), getFrameHeight(), data); 97 | int returnState[] = CalibrateSelf(getFrameWidth(), getFrameHeight(), data, rgba, i_am, flip, debug); 98 | currentState = State.get(returnState[0]); 99 | Log.i(TAG,"state: "+Arrays.toString(returnState)); 100 | Log.i(TAG,"new state = " + currentState); 101 | if(currentState==State.WORKING) { 102 | Log.i(TAG,"Marker found"); 103 | mMarkerShower.removeMarker(); 104 | mMarkerShower.showCharacter(); 105 | } 106 | mStateHandler.onCalibrationStateChanged(returnState); 107 | } else if(currentState == State.WORKING) { 108 | // WriteFrame(getFrameWidth(), getFrameHeight(), data); 109 | float[] state = FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba, i_am, flip, debug); 110 | Log.i(TAG,"State: " + Arrays.toString(state)); 111 | if(mStateHandler!=null) mStateHandler.onCharacterStateChanged(state); //let the handler know.. 
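			//(the 15-float array is packed by GoDetector in jni_part.cpp: [0..3] self
			// character endpoints x,y, [4..7] other character endpoints after rotation
			// by character_to_world_ang, [8] wave timer, [9] tracking flag, [10] size
			// of self, [11..14] the detector state vector)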
112 | } 113 | 114 | currentStateLock.release(); 115 | 116 | // WriteFrame(getFrameWidth(), getFrameHeight(), data); 117 | // float[] state = ProcessFileFrame(mFrameWidth, mFrameHeight, rgba, i_am, flip, debug); 118 | 119 | 120 | bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight()); 121 | return bmp; 122 | } 123 | 124 | public boolean isDebug() { 125 | return debug; 126 | } 127 | public void setDebug(boolean debug) { 128 | this.debug = debug; 129 | } 130 | public boolean isFlip() { 131 | return flip; 132 | } 133 | public void setFlip(boolean flip) { 134 | this.flip = flip; 135 | } 136 | public int getI_am() { 137 | return i_am; 138 | } 139 | public void setI_am(int i_am) { 140 | this.i_am = i_am; 141 | } 142 | public IMarkerShower getmMarkerShower() { 143 | return mMarkerShower; 144 | } 145 | 146 | public void setmMarkerShower(IMarkerShower mMarkerShower) { 147 | this.mMarkerShower = mMarkerShower; 148 | } 149 | public State getCurrentState() { 150 | return currentState; 151 | } 152 | 153 | public void setCurrentState(State currentState) { 154 | this.currentState = currentState; 155 | } 156 | 157 | public native float[] FindFeatures(int width, int height, byte yuv[], int[] rgba, int i_am, boolean _flip, boolean _debug); 158 | public native float[] ProcessFileFrame(int width, int height, int[] bgra, int i_am, boolean _flip, boolean _debug); 159 | public native void WriteFrame(int width, int height, byte yuv[]); 160 | public native int[] CalibrateSelf(int width, int height, byte yuv[], int[] rgba, int i_am, boolean _flip, boolean _debug); 161 | public native void ResetFrameIndex(); 162 | public native void SetCalibrationState(int state); 163 | 164 | static { 165 | Log.i(TAG,"System.loadLibrary(...);"); 166 | System.loadLibrary("headfollower_native"); 167 | Log.i(TAG,"loaded."); 168 | } 169 | 170 | public void disableTracking() { 171 | try { 172 | currentStateLock.acquire(); 173 | } catch (InterruptedException e) { 174 | Log.e(TAG,"thread interrupted while skipping calibration",e); 175 | } 176 | currentState = State.NO_TRACKING; 177 | mMarkerShower.showCharacter(); 178 | 179 | mCamera.stopPreview(); 180 | mCamera.setPreviewCallback(null); 181 | mCamera.release(); 182 | 183 | currentStateLock.release(); 184 | } 185 | 186 | public void recalibrate() { 187 | try { 188 | currentStateLock.acquire(); 189 | } catch (InterruptedException e) { 190 | Log.e(TAG,"thread interrupted while recalibrate",e); 191 | } 192 | SetCalibrationState(State.CALIBRATING_NO_MARKERS_FOUND.code); 193 | currentState = State.CALIBRATING_NO_MARKERS_FOUND; 194 | mMarkerShower.showCalibrationMessage(); 195 | currentStateLock.release(); 196 | } 197 | 198 | 199 | } 200 | -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/graphics/MyCanvasView.java: -------------------------------------------------------------------------------- 1 | package edu.mit.media.fluid.royshil.graphics; 2 | 3 | import java.io.IOException; 4 | import java.text.DecimalFormat; 5 | import java.util.concurrent.locks.Lock; 6 | import java.util.concurrent.locks.ReentrantLock; 7 | 8 | import android.app.AlertDialog; 9 | import android.content.Context; 10 | import android.content.res.AssetManager; 11 | import android.graphics.Bitmap; 12 | import android.graphics.BitmapFactory; 13 | import android.graphics.Canvas; 14 | import android.graphics.Color; 15 | import android.graphics.Matrix; 16 | import android.graphics.Paint; 17 | import 
android.graphics.Paint.Style; 18 | import android.graphics.Rect; 19 | import android.util.AttributeSet; 20 | import android.util.Log; 21 | import android.view.View; 22 | 23 | public class MyCanvasView extends View { 24 | 25 | private float rotation; 26 | private float scale; 27 | private Matrix matrix; 28 | private Bitmap bmp; 29 | private Paint paint; 30 | private Animator a; 31 | private RotatorScaler r; 32 | public Lock matrixLock; //may use Object and notify()-wait().. 33 | private Lock bmpLock; 34 | private Rect clip; 35 | private boolean mLookingRight; 36 | 37 | public MyCanvasView(Context context, AttributeSet attrs) { 38 | super(context, attrs); 39 | matrix = new Matrix(); matrix.setScale(1.0f, 1.0f); 40 | paint = new Paint(); 41 | paint.setColor(Color.RED); 42 | paint.setStyle(Style.STROKE); 43 | matrixLock = new ReentrantLock(); 44 | bmpLock = new ReentrantLock(); 45 | rotation = 0.0f; 46 | scale = 1.0f; 47 | mLookingRight = false; 48 | } 49 | 50 | @Override 51 | protected void onDraw(Canvas canvas) { 52 | super.onDraw(canvas); 53 | 54 | if(bmp != null) { 55 | // paint.setColor(Color.BLUE); 56 | clip = canvas.getClipBounds(); 57 | float w = clip.width(), w2 = w/2.0f, h = clip.height(), h2 = h/2.0f; 58 | // canvas.drawRect(0,0,w-1,h-1, paint); 59 | // canvas.drawCircle(w2, h2, 10, paint); 60 | 61 | matrixLock.lock(); 62 | Matrix m = new Matrix(); 63 | m.reset(); 64 | m.postTranslate(0, 80); 65 | m.postConcat(matrix); 66 | canvas.setMatrix(m); 67 | matrixLock.unlock(); 68 | 69 | clip = canvas.getClipBounds(); 70 | w = clip.width(); 71 | w2 = w/2.0f; 72 | h = clip.height(); 73 | h2 = h/2.0f; 74 | // paint.setColor(Color.GREEN); 75 | // canvas.drawRect(0,0,w-1,h-1, paint); 76 | 77 | bmpLock.lock(); 78 | float left = scale*(w2 - (float)bmp.getWidth()/2.0f); 79 | float top = scale*(h2 - (float)bmp.getHeight()/2.0f); 80 | 81 | Bitmap _bmp = bmp; 82 | //check if rotated, and set the matrix accordingly 83 | if(!mLookingRight) { 84 | Matrix _m = new Matrix(); 85 | _m.preScale(-1.0f, 1.0f); 86 | _bmp = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), _m, false); 87 | } 88 | 89 | canvas.drawBitmap(_bmp, left, top, paint); 90 | // paint.setColor(Color.RED); 91 | // canvas.drawRect(left, top, left + bmp.getWidth(), top + bmp.getHeight(), paint); 92 | bmpLock.unlock(); 93 | } 94 | } 95 | 96 | private class Animator extends Thread { 97 | 98 | private final boolean shouldTurn; 99 | private final MyCanvasView myCanvasView; 100 | private final MyAnimations.MyAnim myAnim; 101 | private AssetManager assets; 102 | 103 | public Animator( 104 | MyAnimations.MyAnim myAnim, 105 | boolean shouldTurn, 106 | MyCanvasView myCanvasView) 107 | { 108 | this.myAnim = myAnim; 109 | this.shouldTurn = shouldTurn; 110 | this.myCanvasView = myCanvasView; 111 | assets = myCanvasView.getContext().getAssets(); 112 | } 113 | 114 | @Override 115 | public void run() { 116 | super.run(); 117 | 118 | try { 119 | do { 120 | if(myAnim.start == -1 || myAnim.end == -1) { 121 | //this is a single image, not an animation 122 | tryLoadBitmap(myAnim.filename); 123 | } else { 124 | for (int i = myAnim.start; i <= myAnim.end; i++) { 125 | boolean bitmapLoaded; 126 | bitmapLoaded = tryLoadBitmap(myAnim.filename + new DecimalFormat("0000").format(i) + ".png"); 127 | if(!bitmapLoaded) { break; } 128 | } 129 | } 130 | } while(myAnim.loop); 131 | } catch (InterruptedException e) { 132 | e.printStackTrace(); 133 | } finally { 134 | if(shouldTurn) { 135 | myCanvasView.setmLookingRight(!myCanvasView.ismLookingRight()); 136 | } 
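				//(each animation is a run of numbered PNGs in assets, e.g.
				// "guy/guy_0037.png": tryLoadBitmap decodes one frame, invalidates the
				// view, and sleeps 25ms, so clips play at roughly 40 fps; shouldTurn
				// flips the character's facing once the clip finishes)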
137 | } 138 | } 139 | 140 | private boolean tryLoadBitmap(String bmpFilename) throws InterruptedException { 141 | try { 142 | bmpLock.lock(); 143 | bmp = BitmapFactory.decodeStream(assets.open(bmpFilename)); 144 | bmpLock.unlock(); 145 | if(bmp == null) { 146 | AlertDialog a = new AlertDialog.Builder(getContext()).create(); 147 | a.setMessage("Cannot load image"); 148 | a.show(); 149 | return false; 150 | } 151 | myCanvasView.postInvalidate(); 152 | 153 | sleep(25); 154 | } catch (IOException e) { 155 | e.printStackTrace(); 156 | } 157 | return true; 158 | } 159 | } 160 | 161 | private class RotatorScaler extends Thread { 162 | private static final String TAG = "RotatorScaler"; 163 | private float deg; 164 | private float scl; 165 | private MyCanvasView myCanvasView; 166 | 167 | public RotatorScaler(float deg, float scl, MyCanvasView myCanvasView) { 168 | this.deg = deg; 169 | this.scl = scl; 170 | this.myCanvasView = myCanvasView; 171 | } 172 | 173 | @Override 174 | public void run() { 175 | super.run(); 176 | 177 | // float h2 = (float)myCanvasView.getHeight()/2.0f; 178 | // float w2 = (float)myCanvasView.getWidth()/2.0f; 179 | // float h2 = clip.height()/2.0f; 180 | // float w2 = clip.width()/2.0f; 181 | 182 | float rotstep = (deg-rotation)/20.0f; 183 | float scalestep = (scl-scale)/20.0f; 184 | for (int i = 0; i < 20; i++) { 185 | matrixLock.lock(); 186 | matrix.reset(); 187 | 188 | //linear interpolation 189 | // rotation += rotstep; 190 | // matrix.setRotate(rotation,w2,h2); 191 | 192 | scale += scalestep; 193 | matrix.postScale(scale, scale); 194 | 195 | matrixLock.unlock(); 196 | 197 | myCanvasView.postInvalidate(); 198 | 199 | try { 200 | sleep(30); 201 | } catch (InterruptedException e) { 202 | Log.i(TAG,"interrupted",e); 203 | break; 204 | } 205 | } 206 | } 207 | 208 | public void setDeg(float deg) {this.deg = deg;} 209 | public void setScl(float scl) {this.scl = scl;} 210 | 211 | public float getDeg() { 212 | return deg; 213 | } 214 | 215 | public float getScl() { 216 | return scl; 217 | } 218 | } 219 | 220 | public void fireAnimation(final MyAnimations.MyAnim myAnim, final boolean shouldTurn) { 221 | if(a != null && a.isAlive()) { 222 | a.interrupt(); 223 | a = null; 224 | // try { 225 | // a.join(); 226 | // } catch (InterruptedException e) { 227 | // e.printStackTrace(); 228 | // } 229 | } 230 | a = new Animator(myAnim,shouldTurn,this); 231 | a.start(); //TODO: reuse the object 232 | } 233 | 234 | public void setRotationAndScale(float deg, float scl) { 235 | if(r != null) { 236 | if(r.getDeg() != deg || r.getScl() != scl) { 237 | if(r.isAlive()) { 238 | r.interrupt(); 239 | // try { 240 | // r.join(); 241 | // } catch (InterruptedException e) { 242 | // e.printStackTrace(); 243 | // } 244 | } 245 | r.setDeg(deg); 246 | r.setScl(Math.min(scl,2.0f)); 247 | r.run(); 248 | } 249 | } else { 250 | r = new RotatorScaler(deg,scl,this); 251 | r.start(); 252 | } 253 | } 254 | 255 | public boolean ismLookingRight() { 256 | return mLookingRight; 257 | } 258 | 259 | public void setmLookingRight(boolean mLookingRight) { 260 | this.mLookingRight = mLookingRight; 261 | } 262 | 263 | } 264 | -------------------------------------------------------------------------------- /jni/jni_part.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include "Detector.h" 9 | 10 | #include 11 | #include 12 | #include 13 | 14 | using namespace std; 15 | using namespace cv; 16 | 17 
| extern "C" { 18 | jfloatArray GoDetector(JNIEnv* env, Mat& mbgra, jboolean i_am, jboolean _flip, jboolean _debug); 19 | 20 | Detector detector; 21 | 22 | JNIEXPORT jfloatArray JNICALL Java_edu_mit_media_fluid_royshil_headfollower_CharacterTrackerView_FindFeatures( 23 | JNIEnv* env, 24 | jobject thiz, 25 | jint width, 26 | jint height, 27 | jbyteArray yuv, 28 | jintArray bgra, 29 | jint i_am, 30 | jboolean _flip, 31 | jboolean _debug ) 32 | { 33 | jbyte* _yuv = env->GetByteArrayElements(yuv, 0); 34 | jint* _bgra = env->GetIntArrayElements(bgra, 0); 35 | 36 | Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv); 37 | Mat _mbgra(height, width, CV_8UC4, (unsigned char *)_bgra); 38 | Mat mgray(height, width, CV_8UC1, (unsigned char *)_yuv); 39 | 40 | //Please make attention about BGRA byte order 41 | //ARGB stored in java as int array becomes BGRA at native level 42 | cvtColor(myuv, _mbgra, CV_YUV420sp2BGR, 4); 43 | 44 | env->ReleaseByteArrayElements(yuv, _yuv, 0); 45 | 46 | detector.shouldResize = true; 47 | 48 | //slicing the region of interest... 49 | Mat mbgra = _mbgra(Rect(_mbgra.cols/4,_mbgra.rows/10,_mbgra.cols/2,8*_mbgra.rows/10)); 50 | 51 | // vector v; 52 | // 53 | // FastFeatureDetector fastdetector(50); 54 | // fastdetector.detect(mgray, v); 55 | // for( size_t i = 0; i < v.size(); i++ ) 56 | // circle(mbgra, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(0,0,255,255)); 57 | 58 | jfloatArray returnfa = GoDetector(env, mbgra, i_am, _flip, _debug); 59 | env->ReleaseIntArrayElements(bgra, _bgra, 0); 60 | 61 | return returnfa; 62 | } 63 | 64 | JNIEXPORT jintArray JNICALL Java_edu_mit_media_fluid_royshil_headfollower_CharacterTrackerView_CalibrateSelf( 65 | JNIEnv* env, 66 | jobject thiz, 67 | jint width, 68 | jint height, 69 | jbyteArray yuv, 70 | jintArray bgra, 71 | jint i_am, 72 | jboolean _flip, 73 | jboolean _debug ) 74 | { 75 | jbyte* _yuv = env->GetByteArrayElements(yuv, 0); 76 | jint* _bgra = env->GetIntArrayElements(bgra, 0); 77 | 78 | Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv); 79 | Mat _mbgra(height, width, CV_8UC4, (unsigned char *)_bgra); 80 | cvtColor(myuv, _mbgra, CV_YUV420sp2BGR, 4); 81 | env->ReleaseByteArrayElements(yuv, _yuv, 0); 82 | 83 | detector.shouldResize = true; 84 | 85 | //slicing the region of interest... 
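	//(the Rect keeps the central half of the width and the middle 8/10 of the
	// height; since a Mat ROI is just a view, whatever the detector draws into
	// mbgra lands directly in the Java int[] that becomes the preview Bitmap)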
86 | Mat mbgra = _mbgra(Rect(_mbgra.cols/4,_mbgra.rows/10,_mbgra.cols/2,8*_mbgra.rows/10)); 87 | 88 | Scalar m = mean(mbgra); 89 | 90 | vector<int> inta = detector.calibrateSelfCharacter(mbgra,i_am,_flip,_debug); 91 | 92 | env->ReleaseIntArrayElements(bgra, _bgra, 0); 93 | 94 | jintArray state = env->NewIntArray(4); 95 | env->SetIntArrayRegion(state,0,4,&(inta[0])); 96 | return state; 97 | } 98 | 99 | 100 | jfloatArray GoDetector(JNIEnv* env, Mat& mbgra, jboolean i_am, jboolean _flip, jboolean _debug) { 101 | vector<int> statev = detector.findCharacter(mbgra, i_am, _flip, _debug); 102 | 103 | int slfChrSz = detector.selfCharacter.size(); 104 | int othrChrSz = detector.otherCharacter.size(); 105 | 106 | vector<Point> o_t = detector.otherCharacter; 107 | if (othrChrSz > 1) { 108 | float a = detector.character_to_world_ang; 109 | Point origin = detector.getSelfCenter(); 110 | Mat trans = (Mat_<float>(2,2) << origin.x,origin.y,origin.x,origin.y); 111 | Mat o_t_m; Mat(o_t).reshape(1,2).convertTo(o_t_m,CV_32F); 112 | Mat translated = o_t_m - trans; 113 | Mat rot_mat = (Mat_<float>(2,2) << cos(a) , -sin(a), sin(a), cos(a)); 114 | Mat rotated = translated * rot_mat; 115 | rotated = rotated + trans; 116 | Mat oo_t_m = Mat(o_t).reshape(1,2); 117 | rotated.convertTo(oo_t_m, CV_32S); 118 | } 119 | 120 | jfloat a[15] = //{0.0f}; 121 | { 122 | (slfChrSz>0) ? detector.selfCharacter[0].x : -1.0f, 123 | (slfChrSz>0) ? detector.selfCharacter[0].y : -1.0f, 124 | (slfChrSz>1) ? detector.selfCharacter[1].x : -1.0f, 125 | (slfChrSz>1) ? detector.selfCharacter[1].y : -1.0f, 126 | (othrChrSz>0) ? o_t[0].x : -1.0f, 127 | (othrChrSz>0) ? o_t[0].y : -1.0f, 128 | (othrChrSz>1) ? o_t[1].x : -1.0f, 129 | (othrChrSz>1) ? o_t[1].y : -1.0f, 130 | detector.getWaveTimer(), 131 | detector.tracking ? 1.0f : 0.0f, 132 | (float)detector.getSizeOfSelf(), 133 | (float)statev[0], 134 | (float)statev[1], 135 | (float)statev[2], 136 | (float)statev[3] 137 | }; 138 | 139 | jfloatArray state = env->NewFloatArray(15); 140 | env->SetFloatArrayRegion(state,0,15,a); 141 | return state; 142 | } 143 | 144 | // VideoCapture vc; 145 | Mat frame; 146 | int frame_index; 147 | string frame_prefix; 148 | Mat alpha; 149 | 150 | JNIEXPORT void JNICALL Java_edu_mit_media_fluid_royshil_headfollower_CharacterTrackerView_ResetFrameIndex(JNIEnv* env, 151 | jobject thiz) 152 | { 153 | frame_index = 1; 154 | } 155 | 156 | #define ERROR_FILE_DOESNT_EXIST -1 157 | #define ERROR_FRAME_DATA_NULL -2 158 | #define ERROR_VIDEOCAPTURE_NOT_OPEN -3 159 | 160 | JNIEXPORT jintArray JNICALL Java_edu_mit_media_fluid_royshil_headfollower_FileFrameProcessor_OpenFromFile( 161 | JNIEnv* env, 162 | jobject thiz, 163 | jstring filelocation 164 | ) 165 | { 166 | const char* _str = env->GetStringUTFChars(filelocation, 0); 167 | frame_prefix = _str; 168 | env->ReleaseStringUTFChars(filelocation, _str); 169 | 170 | jintArray retinta = env->NewIntArray(2); 171 | int reta[2]; 172 | 173 | frame_index = 1; 174 | stringstream frame_name; frame_name << frame_prefix << frame_index << ".png"; 175 | struct stat BUF; 176 | if(stat(frame_name.str().c_str(),&BUF)==0) 177 | { 178 | // if(vc.open(ret)) 179 | { 180 | // if (vc.isOpened()) 181 | { 182 | // vc >> frame; 183 | frame = imread(frame_name.str()); 184 | 185 | if(frame.data) { 186 | frame_index++; 187 | detector.shouldResize = false; 188 | reta[0] = frame.cols; 189 | reta[1] = frame.rows; 190 | 191 | alpha.create(frame.size(),CV_8UC1); 192 | alpha.setTo(255); 193 | } else 194 | reta[0] = ERROR_FRAME_DATA_NULL; 195 | } 196 | // else 197 | // reta[0] = 
ERROR_VIDEOCAPTURE_NOT_OPEN; 198 | } 199 | // else 200 | // reta[0] = ERROR_VIDEOCAPTURE_NOT_OPEN; 201 | } else 202 | reta[0] = ERROR_FILE_DOESNT_EXIST; 203 | 204 | env->SetIntArrayRegion(retinta,0,2,reta); 205 | return retinta; 206 | } 207 | 208 | JNIEXPORT jfloatArray JNICALL Java_edu_mit_media_fluid_royshil_headfollower_CharacterTrackerView_ProcessFileFrame( 209 | JNIEnv* env, 210 | jobject thiz, 211 | jint width, 212 | jint height, 213 | jintArray bgra, 214 | jint i_am, 215 | jboolean _flip, 216 | jboolean _debug ) 217 | { 218 | //get the output byte array to work on 219 | jint* _bgra = env->GetIntArrayElements(bgra, 0); 220 | Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra); 221 | 222 | // vc >> frame; 223 | stringstream frame_name; frame_name << frame_prefix << frame_index << ".png"; 224 | frame = imread(frame_name.str()); 225 | frame_index++; 226 | 227 | jfloatArray retval = GoDetector(env, frame, i_am, _flip, _debug); 228 | 229 | int fromTo[8] = {0,0, 1,1, 2,2, 3,3}; 230 | Mat srcs[2] = {frame,alpha}; 231 | mixChannels(srcs,2,&mbgra,1,fromTo,4); // fill the buffer.. 232 | 233 | // jfloatArray retval = env->NewFloatArray(11); 234 | // jfloat flta[11] = {1.0f,1.0f,1.0f,1.0f,1.0f,1.0f,1.0f,1.0f,1.0f,1.0f,1.0f,1.0f}; 235 | // env->SetFloatArrayRegion(retval,0,11,flta); 236 | 237 | env->ReleaseIntArrayElements(bgra, _bgra, 0); 238 | 239 | return retval; 240 | } 241 | 242 | JNIEXPORT void JNICALL Java_edu_mit_media_fluid_royshil_headfollower_CharacterTrackerView_WriteFrame( 243 | JNIEnv* env, 244 | jobject thiz, 245 | jint width, 246 | jint height, 247 | jbyteArray yuv 248 | ) 249 | { 250 | jbyte* _yuv = env->GetByteArrayElements(yuv, 0); 251 | 252 | Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv); 253 | Mat bgr; 254 | 255 | cvtColor(myuv, bgr, CV_YUV420sp2BGR); 256 | 257 | env->ReleaseByteArrayElements(yuv, _yuv, 0); 258 | 259 | stringstream ss; ss << "/sdcard/saved/frame" << frame_index++ << ".png"; 260 | imwrite(ss.str(),bgr); 261 | } 262 | 263 | JNIEXPORT void JNICALL Java_edu_mit_media_fluid_royshil_headfollower_CharacterTrackerView_SetCalibrationState( 264 | JNIEnv* env, 265 | jobject thiz, 266 | jint state_to_set) 267 | { 268 | detector.setCalibrationState(state_to_set); 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/headfollower/Sensors.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2007 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package edu.mit.media.fluid.royshil.headfollower; 18 | 19 | import android.app.Activity; 20 | import android.content.Context; 21 | import android.os.Bundle; 22 | import android.view.View; 23 | import android.hardware.SensorManager; 24 | import android.hardware.SensorListener; 25 | import android.util.Log; 26 | import android.graphics.Bitmap; 27 | import android.graphics.Canvas; 28 | import android.graphics.Color; 29 | import android.graphics.Paint; 30 | import android.graphics.Path; 31 | import android.graphics.RectF; 32 | 33 | /** 34 | *
<h3>Application that displays the values of the acceleration sensor graphically.</h3> 35 | 36 | <p>This demonstrates the {@link android.hardware.SensorManager android.hardware.SensorManager} class. 37 | 38 | <h4>Demo</h4> 39 | OS / Sensors 40 | 41 | <h4>Source files</h4> 42 | * <table class="LinkTable"> 43 | *   <tr> 44 | *     <td>src/com.example.android.apis/os/Sensors.java</td> 45 | *     <td>Sensors</td> 46 | *   </tr> 47 | * </table>
48 | */ 49 | public class Sensors extends Activity { 50 | /** Tag string for our debug logs */ 51 | private static final String TAG = "Sensors"; 52 | 53 | private SensorManager mSensorManager; 54 | private GraphView mGraphView; 55 | 56 | private class GraphView extends View implements SensorListener 57 | { 58 | private Bitmap mBitmap; 59 | private Paint mPaint = new Paint(); 60 | private Canvas mCanvas = new Canvas(); 61 | private Path mPath = new Path(); 62 | private RectF mRect = new RectF(); 63 | private float mLastValues[] = new float[3*2]; 64 | private float mOrientationValues[] = new float[3]; 65 | private int mColors[] = new int[3*2]; 66 | private float mLastX; 67 | private float mScale[] = new float[2]; 68 | private float mYOffset; 69 | private float mMaxX; 70 | private float mSpeed = 1.0f; 71 | private float mWidth; 72 | private float mHeight; 73 | 74 | public GraphView(Context context) { 75 | super(context); 76 | mColors[0] = Color.argb(192, 255, 64, 64); 77 | mColors[1] = Color.argb(192, 64, 128, 64); 78 | mColors[2] = Color.argb(192, 64, 64, 255); 79 | mColors[3] = Color.argb(192, 64, 255, 255); 80 | mColors[4] = Color.argb(192, 128, 64, 128); 81 | mColors[5] = Color.argb(192, 255, 255, 64); 82 | 83 | mPaint.setFlags(Paint.ANTI_ALIAS_FLAG); 84 | mRect.set(-0.5f, -0.5f, 0.5f, 0.5f); 85 | mPath.arcTo(mRect, 0, 180); 86 | } 87 | 88 | @Override 89 | protected void onSizeChanged(int w, int h, int oldw, int oldh) { 90 | mBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.RGB_565); 91 | mCanvas.setBitmap(mBitmap); 92 | mCanvas.drawColor(0xFFFFFFFF); 93 | mYOffset = h * 0.5f; 94 | mScale[0] = - (h * 0.5f * (1.0f / (SensorManager.STANDARD_GRAVITY * 2))); 95 | mScale[1] = - (h * 0.5f * (1.0f / (SensorManager.MAGNETIC_FIELD_EARTH_MAX))); 96 | mWidth = w; 97 | mHeight = h; 98 | if (mWidth < mHeight) { 99 | mMaxX = w; 100 | } else { 101 | mMaxX = w-50; 102 | } 103 | mLastX = mMaxX; 104 | super.onSizeChanged(w, h, oldw, oldh); 105 | } 106 | 107 | @Override 108 | protected void onDraw(Canvas canvas) { 109 | synchronized (this) { 110 | if (mBitmap != null) { 111 | final Paint paint = mPaint; 112 | final Path path = mPath; 113 | final int outer = 0xFFC0C0C0; 114 | final int inner = 0xFFff7010; 115 | 116 | if (mLastX >= mMaxX) { 117 | mLastX = 0; 118 | final Canvas cavas = mCanvas; 119 | final float yoffset = mYOffset; 120 | final float maxx = mMaxX; 121 | final float oneG = SensorManager.STANDARD_GRAVITY * mScale[0]; 122 | paint.setColor(0xFFAAAAAA); 123 | cavas.drawColor(0xFFFFFFFF); 124 | cavas.drawLine(0, yoffset, maxx, yoffset, paint); 125 | cavas.drawLine(0, yoffset+oneG, maxx, yoffset+oneG, paint); 126 | cavas.drawLine(0, yoffset-oneG, maxx, yoffset-oneG, paint); 127 | } 128 | canvas.drawBitmap(mBitmap, 0, 0, null); 129 | 130 | float[] values = mOrientationValues; 131 | if (mWidth < mHeight) { 132 | float w0 = mWidth * 0.333333f; 133 | float w = w0 - 32; 134 | float x = w0*0.5f; 135 | for (int i=0 ; i<3 ; i++) { 136 | canvas.save(Canvas.MATRIX_SAVE_FLAG); 137 | canvas.translate(x, w*0.5f + 4.0f); 138 | canvas.save(Canvas.MATRIX_SAVE_FLAG); 139 | paint.setColor(outer); 140 | canvas.scale(w, w); 141 | canvas.drawOval(mRect, paint); 142 | canvas.restore(); 143 | canvas.scale(w-5, w-5); 144 | paint.setColor(inner); 145 | canvas.rotate(-values[i]); 146 | canvas.drawPath(path, paint); 147 | canvas.restore(); 148 | x += w0; 149 | } 150 | } else { 151 | float h0 = mHeight * 0.333333f; 152 | float h = h0 - 32; 153 | float y = h0*0.5f; 154 | for (int i=0 ; i<3 ; i++) { 155 | 
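// Landscape layout: stack the three orientation dials (azimuth, pitch,
// roll) down the right-hand edge of the screen; the portrait branch above
// lays the same dials out across the width instead.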
canvas.save(Canvas.MATRIX_SAVE_FLAG); 156 | canvas.translate(mWidth - (h*0.5f + 4.0f), y); 157 | canvas.save(Canvas.MATRIX_SAVE_FLAG); 158 | paint.setColor(outer); 159 | canvas.scale(h, h); 160 | canvas.drawOval(mRect, paint); 161 | canvas.restore(); 162 | canvas.scale(h-5, h-5); 163 | paint.setColor(inner); 164 | canvas.rotate(-values[i]); 165 | canvas.drawPath(path, paint); 166 | canvas.restore(); 167 | y += h0; 168 | } 169 | } 170 | 171 | } 172 | } 173 | } 174 | 175 | public void onSensorChanged(int sensor, float[] values) { 176 | //Log.d(TAG, "sensor: " + sensor + ", x: " + values[0] + ", y: " + values[1] + ", z: " + values[2]); 177 | synchronized (this) { 178 | if (mBitmap != null) { 179 | final Canvas canvas = mCanvas; 180 | final Paint paint = mPaint; 181 | if (sensor == SensorManager.SENSOR_ORIENTATION) { 182 | for (int i=0 ; i<3 ; i++) { 183 | mOrientationValues[i] = values[i]; 184 | } 185 | } else { 186 | float deltaX = mSpeed; 187 | float newX = mLastX + deltaX; 188 | 189 | int j = (sensor == SensorManager.SENSOR_MAGNETIC_FIELD) ? 1 : 0; 190 | for (int i=0 ; i<3 ; i++) { 191 | int k = i+j*3; 192 | final float v = mYOffset + values[i] * mScale[j]; 193 | paint.setColor(mColors[k]); 194 | canvas.drawLine(mLastX, mLastValues[k], newX, v, paint); 195 | mLastValues[k] = v; 196 | } 197 | if (sensor == SensorManager.SENSOR_MAGNETIC_FIELD) 198 | mLastX += mSpeed; 199 | } 200 | invalidate(); 201 | } 202 | } 203 | } 204 | 205 | public void onAccuracyChanged(int sensor, int accuracy) { 206 | // TODO Auto-generated method stub 207 | 208 | } 209 | } 210 | 211 | /** 212 | * Initialization of the Activity after it is first created. Must at least 213 | * call {@link android.app.Activity#setContentView setContentView()} to 214 | * describe what is to be displayed in the screen. 215 | */ 216 | @Override 217 | protected void onCreate(Bundle savedInstanceState) { 218 | // Be sure to call the super class. 219 | super.onCreate(savedInstanceState); 220 | 221 | mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); 222 | mGraphView = new GraphView(this); 223 | setContentView(mGraphView); 224 | } 225 | 226 | @Override 227 | protected void onResume() { 228 | super.onResume(); 229 | mSensorManager.registerListener(mGraphView, 230 | SensorManager.SENSOR_ACCELEROMETER | 231 | SensorManager.SENSOR_MAGNETIC_FIELD | 232 | SensorManager.SENSOR_ORIENTATION, 233 | SensorManager.SENSOR_DELAY_FASTEST); 234 | } 235 | 236 | @Override 237 | protected void onStop() { 238 | mSensorManager.unregisterListener(mGraphView); 239 | super.onStop(); 240 | } 241 | } 242 | -------------------------------------------------------------------------------- /jni/PCOpenCVTest/PCOpenCVTest.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 45; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 8DD76FAC0486AB0100D96B5E /* main.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 08FB7796FE84155DC02AAC07 /* main.cpp */; settings = {ATTRIBUTES = (); }; }; 11 | 8DD76FB00486AB0100D96B5E /* PCOpenCVTest.1 in CopyFiles */ = {isa = PBXBuildFile; fileRef = C6A0FF2C0290799A04C91782 /* PCOpenCVTest.1 */; }; 12 | D70AA19312F779B000600110 /* Detector.i in CopyFiles */ = {isa = PBXBuildFile; fileRef = D70AA19212F779B000600110 /* Detector.i */; }; 13 | D77E8CAA12F1DA26002A892F /* Detector.cpp in Sources */ = {isa = PBXBuildFile; fileRef = D77E8CA812F1DA26002A892F /* Detector.cpp */; }; 14 | D7E7D7BA140A9F0B00222C62 /* libopencv_core.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7B9140A9F0B00222C62 /* libopencv_core.2.3.1.dylib */; }; 15 | D7E7D7BE140A9F2700222C62 /* libopencv_video.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7BD140A9F2700222C62 /* libopencv_video.2.3.1.dylib */; }; 16 | D7E7D7C6140AA18100222C62 /* libopencv_highgui.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7C5140AA18100222C62 /* libopencv_highgui.2.3.1.dylib */; }; 17 | D7E7D7CA140AA1A400222C62 /* libopencv_features2d.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7C9140AA1A400222C62 /* libopencv_features2d.2.3.1.dylib */; }; 18 | D7E7D7CE140AA1B200222C62 /* libopencv_objdetect.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7CD140AA1B200222C62 /* libopencv_objdetect.2.3.1.dylib */; }; 19 | D7E7D7D2140AA1CB00222C62 /* libopencv_calib3d.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7D1140AA1CB00222C62 /* libopencv_calib3d.2.3.1.dylib */; }; 20 | D7E7D7D4140AA1CB00222C62 /* libopencv_contrib.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7D3140AA1CB00222C62 /* libopencv_contrib.2.3.1.dylib */; }; 21 | D7E7D7D6140AA1CB00222C62 /* libopencv_flann.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7D5140AA1CB00222C62 /* libopencv_flann.2.3.1.dylib */; }; 22 | D7E7D7D8140AA1CB00222C62 /* libopencv_gpu.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7D7140AA1CB00222C62 /* libopencv_gpu.2.3.1.dylib */; }; 23 | D7E7D7DA140AA1CB00222C62 /* libopencv_imgproc.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7D9140AA1CB00222C62 /* libopencv_imgproc.2.3.1.dylib */; }; 24 | D7E7D7DC140AA1CB00222C62 /* libopencv_legacy.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7DB140AA1CB00222C62 /* libopencv_legacy.2.3.1.dylib */; }; 25 | D7E7D7DE140AA1CB00222C62 /* libopencv_ml.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7DD140AA1CB00222C62 /* libopencv_ml.2.3.1.dylib */; }; 26 | D7E7D7E0140AA1CB00222C62 /* libopencv_ts.2.3.1.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = D7E7D7DF140AA1CB00222C62 /* libopencv_ts.2.3.1.dylib */; }; 27 | /* End PBXBuildFile section */ 28 | 29 | /* Begin PBXCopyFilesBuildPhase section */ 30 | 8DD76FAF0486AB0100D96B5E /* CopyFiles */ = { 31 | isa = PBXCopyFilesBuildPhase; 32 | buildActionMask = 8; 33 | dstPath = /usr/share/man/man1/; 34 | dstSubfolderSpec = 0; 35 | files = ( 36 | D70AA19312F779B000600110 /* Detector.i in CopyFiles */, 37 | 8DD76FB00486AB0100D96B5E /* PCOpenCVTest.1 in CopyFiles */, 38 | ); 39 | runOnlyForDeploymentPostprocessing = 1; 40 | }; 41 | /* End PBXCopyFilesBuildPhase section */ 42 | 43 | /* Begin PBXFileReference 
section */ 44 | 08FB7796FE84155DC02AAC07 /* main.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = main.cpp; sourceTree = ""; }; 45 | 8DD76FB20486AB0100D96B5E /* PCOpenCVTest */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = PCOpenCVTest; sourceTree = BUILT_PRODUCTS_DIR; }; 46 | C6A0FF2C0290799A04C91782 /* PCOpenCVTest.1 */ = {isa = PBXFileReference; lastKnownFileType = text.man; path = PCOpenCVTest.1; sourceTree = ""; }; 47 | D70AA19212F779B000600110 /* Detector.i */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c.preprocessed; name = Detector.i; path = ../Detector.i; sourceTree = SOURCE_ROOT; }; 48 | D77E8CA812F1DA26002A892F /* Detector.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = Detector.cpp; path = ../Detector.cpp; sourceTree = SOURCE_ROOT; }; 49 | D77E8CA912F1DA26002A892F /* Detector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Detector.h; path = ../Detector.h; sourceTree = SOURCE_ROOT; }; 50 | D7E7D7B9140A9F0B00222C62 /* libopencv_core.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_core.2.3.1.dylib; path = usr/local/lib/libopencv_core.2.3.1.dylib; sourceTree = SDKROOT; }; 51 | D7E7D7BD140A9F2700222C62 /* libopencv_video.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_video.2.3.1.dylib; path = usr/local/lib/libopencv_video.2.3.1.dylib; sourceTree = SDKROOT; }; 52 | D7E7D7C5140AA18100222C62 /* libopencv_highgui.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_highgui.2.3.1.dylib; path = usr/local/lib/libopencv_highgui.2.3.1.dylib; sourceTree = SDKROOT; }; 53 | D7E7D7C9140AA1A400222C62 /* libopencv_features2d.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_features2d.2.3.1.dylib; path = usr/local/lib/libopencv_features2d.2.3.1.dylib; sourceTree = SDKROOT; }; 54 | D7E7D7CD140AA1B200222C62 /* libopencv_objdetect.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_objdetect.2.3.1.dylib; path = usr/local/lib/libopencv_objdetect.2.3.1.dylib; sourceTree = SDKROOT; }; 55 | D7E7D7D1140AA1CB00222C62 /* libopencv_calib3d.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_calib3d.2.3.1.dylib; path = usr/local/lib/libopencv_calib3d.2.3.1.dylib; sourceTree = SDKROOT; }; 56 | D7E7D7D3140AA1CB00222C62 /* libopencv_contrib.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_contrib.2.3.1.dylib; path = usr/local/lib/libopencv_contrib.2.3.1.dylib; sourceTree = SDKROOT; }; 57 | D7E7D7D5140AA1CB00222C62 /* libopencv_flann.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_flann.2.3.1.dylib; path = usr/local/lib/libopencv_flann.2.3.1.dylib; sourceTree = SDKROOT; }; 58 | D7E7D7D7140AA1CB00222C62 /* libopencv_gpu.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_gpu.2.3.1.dylib; path = usr/local/lib/libopencv_gpu.2.3.1.dylib; sourceTree = SDKROOT; }; 59 | D7E7D7D9140AA1CB00222C62 /* libopencv_imgproc.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = 
libopencv_imgproc.2.3.1.dylib; path = usr/local/lib/libopencv_imgproc.2.3.1.dylib; sourceTree = SDKROOT; }; 60 | D7E7D7DB140AA1CB00222C62 /* libopencv_legacy.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_legacy.2.3.1.dylib; path = usr/local/lib/libopencv_legacy.2.3.1.dylib; sourceTree = SDKROOT; }; 61 | D7E7D7DD140AA1CB00222C62 /* libopencv_ml.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_ml.2.3.1.dylib; path = usr/local/lib/libopencv_ml.2.3.1.dylib; sourceTree = SDKROOT; }; 62 | D7E7D7DF140AA1CB00222C62 /* libopencv_ts.2.3.1.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_ts.2.3.1.dylib; path = usr/local/lib/libopencv_ts.2.3.1.dylib; sourceTree = SDKROOT; }; 63 | /* End PBXFileReference section */ 64 | 65 | /* Begin PBXFrameworksBuildPhase section */ 66 | 8DD76FAD0486AB0100D96B5E /* Frameworks */ = { 67 | isa = PBXFrameworksBuildPhase; 68 | buildActionMask = 2147483647; 69 | files = ( 70 | D7E7D7BA140A9F0B00222C62 /* libopencv_core.2.3.1.dylib in Frameworks */, 71 | D7E7D7BE140A9F2700222C62 /* libopencv_video.2.3.1.dylib in Frameworks */, 72 | D7E7D7C6140AA18100222C62 /* libopencv_highgui.2.3.1.dylib in Frameworks */, 73 | D7E7D7CA140AA1A400222C62 /* libopencv_features2d.2.3.1.dylib in Frameworks */, 74 | D7E7D7CE140AA1B200222C62 /* libopencv_objdetect.2.3.1.dylib in Frameworks */, 75 | D7E7D7D2140AA1CB00222C62 /* libopencv_calib3d.2.3.1.dylib in Frameworks */, 76 | D7E7D7D4140AA1CB00222C62 /* libopencv_contrib.2.3.1.dylib in Frameworks */, 77 | D7E7D7D6140AA1CB00222C62 /* libopencv_flann.2.3.1.dylib in Frameworks */, 78 | D7E7D7D8140AA1CB00222C62 /* libopencv_gpu.2.3.1.dylib in Frameworks */, 79 | D7E7D7DA140AA1CB00222C62 /* libopencv_imgproc.2.3.1.dylib in Frameworks */, 80 | D7E7D7DC140AA1CB00222C62 /* libopencv_legacy.2.3.1.dylib in Frameworks */, 81 | D7E7D7DE140AA1CB00222C62 /* libopencv_ml.2.3.1.dylib in Frameworks */, 82 | D7E7D7E0140AA1CB00222C62 /* libopencv_ts.2.3.1.dylib in Frameworks */, 83 | ); 84 | runOnlyForDeploymentPostprocessing = 0; 85 | }; 86 | /* End PBXFrameworksBuildPhase section */ 87 | 88 | /* Begin PBXGroup section */ 89 | 08FB7794FE84155DC02AAC07 /* PCOpenCVTest */ = { 90 | isa = PBXGroup; 91 | children = ( 92 | 08FB7795FE84155DC02AAC07 /* Source */, 93 | C6A0FF2B0290797F04C91782 /* Documentation */, 94 | 1AB674ADFE9D54B511CA2CBB /* Products */, 95 | D7E7D7B9140A9F0B00222C62 /* libopencv_core.2.3.1.dylib */, 96 | D7E7D7BD140A9F2700222C62 /* libopencv_video.2.3.1.dylib */, 97 | D7E7D7C5140AA18100222C62 /* libopencv_highgui.2.3.1.dylib */, 98 | D7E7D7C9140AA1A400222C62 /* libopencv_features2d.2.3.1.dylib */, 99 | D7E7D7CD140AA1B200222C62 /* libopencv_objdetect.2.3.1.dylib */, 100 | D7E7D7D1140AA1CB00222C62 /* libopencv_calib3d.2.3.1.dylib */, 101 | D7E7D7D3140AA1CB00222C62 /* libopencv_contrib.2.3.1.dylib */, 102 | D7E7D7D5140AA1CB00222C62 /* libopencv_flann.2.3.1.dylib */, 103 | D7E7D7D7140AA1CB00222C62 /* libopencv_gpu.2.3.1.dylib */, 104 | D7E7D7D9140AA1CB00222C62 /* libopencv_imgproc.2.3.1.dylib */, 105 | D7E7D7DB140AA1CB00222C62 /* libopencv_legacy.2.3.1.dylib */, 106 | D7E7D7DD140AA1CB00222C62 /* libopencv_ml.2.3.1.dylib */, 107 | D7E7D7DF140AA1CB00222C62 /* libopencv_ts.2.3.1.dylib */, 108 | ); 109 | name = PCOpenCVTest; 110 | sourceTree = ""; 111 | }; 112 | 08FB7795FE84155DC02AAC07 /* Source */ = { 113 | isa = PBXGroup; 114 | children = ( 115 | D70AA19212F779B000600110 /* Detector.i */, 116 | 
D77E8CA812F1DA26002A892F /* Detector.cpp */, 117 | D77E8CA912F1DA26002A892F /* Detector.h */, 118 | 08FB7796FE84155DC02AAC07 /* main.cpp */, 119 | ); 120 | name = Source; 121 | sourceTree = ""; 122 | }; 123 | 1AB674ADFE9D54B511CA2CBB /* Products */ = { 124 | isa = PBXGroup; 125 | children = ( 126 | 8DD76FB20486AB0100D96B5E /* PCOpenCVTest */, 127 | ); 128 | name = Products; 129 | sourceTree = ""; 130 | }; 131 | C6A0FF2B0290797F04C91782 /* Documentation */ = { 132 | isa = PBXGroup; 133 | children = ( 134 | C6A0FF2C0290799A04C91782 /* PCOpenCVTest.1 */, 135 | ); 136 | name = Documentation; 137 | sourceTree = ""; 138 | }; 139 | /* End PBXGroup section */ 140 | 141 | /* Begin PBXNativeTarget section */ 142 | 8DD76FA90486AB0100D96B5E /* PCOpenCVTest */ = { 143 | isa = PBXNativeTarget; 144 | buildConfigurationList = 1DEB928508733DD80010E9CD /* Build configuration list for PBXNativeTarget "PCOpenCVTest" */; 145 | buildPhases = ( 146 | 8DD76FAB0486AB0100D96B5E /* Sources */, 147 | 8DD76FAD0486AB0100D96B5E /* Frameworks */, 148 | 8DD76FAF0486AB0100D96B5E /* CopyFiles */, 149 | ); 150 | buildRules = ( 151 | ); 152 | dependencies = ( 153 | ); 154 | name = PCOpenCVTest; 155 | productInstallPath = "$(HOME)/bin"; 156 | productName = PCOpenCVTest; 157 | productReference = 8DD76FB20486AB0100D96B5E /* PCOpenCVTest */; 158 | productType = "com.apple.product-type.tool"; 159 | }; 160 | /* End PBXNativeTarget section */ 161 | 162 | /* Begin PBXProject section */ 163 | 08FB7793FE84155DC02AAC07 /* Project object */ = { 164 | isa = PBXProject; 165 | buildConfigurationList = 1DEB928908733DD80010E9CD /* Build configuration list for PBXProject "PCOpenCVTest" */; 166 | compatibilityVersion = "Xcode 3.1"; 167 | developmentRegion = English; 168 | hasScannedForEncodings = 1; 169 | knownRegions = ( 170 | English, 171 | Japanese, 172 | French, 173 | German, 174 | ); 175 | mainGroup = 08FB7794FE84155DC02AAC07 /* PCOpenCVTest */; 176 | projectDirPath = ""; 177 | projectRoot = ""; 178 | targets = ( 179 | 8DD76FA90486AB0100D96B5E /* PCOpenCVTest */, 180 | ); 181 | }; 182 | /* End PBXProject section */ 183 | 184 | /* Begin PBXSourcesBuildPhase section */ 185 | 8DD76FAB0486AB0100D96B5E /* Sources */ = { 186 | isa = PBXSourcesBuildPhase; 187 | buildActionMask = 2147483647; 188 | files = ( 189 | 8DD76FAC0486AB0100D96B5E /* main.cpp in Sources */, 190 | D77E8CAA12F1DA26002A892F /* Detector.cpp in Sources */, 191 | ); 192 | runOnlyForDeploymentPostprocessing = 0; 193 | }; 194 | /* End PBXSourcesBuildPhase section */ 195 | 196 | /* Begin XCBuildConfiguration section */ 197 | 1DEB928608733DD80010E9CD /* Debug */ = { 198 | isa = XCBuildConfiguration; 199 | buildSettings = { 200 | ALWAYS_SEARCH_USER_PATHS = NO; 201 | COPY_PHASE_STRIP = NO; 202 | GCC_DYNAMIC_NO_PIC = NO; 203 | GCC_ENABLE_FIX_AND_CONTINUE = YES; 204 | GCC_MODEL_TUNING = G5; 205 | GCC_OPTIMIZATION_LEVEL = 0; 206 | INSTALL_PATH = /usr/local/bin; 207 | PRODUCT_NAME = PCOpenCVTest; 208 | }; 209 | name = Debug; 210 | }; 211 | 1DEB928708733DD80010E9CD /* Release */ = { 212 | isa = XCBuildConfiguration; 213 | buildSettings = { 214 | ALWAYS_SEARCH_USER_PATHS = NO; 215 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 216 | GCC_MODEL_TUNING = G5; 217 | INSTALL_PATH = /usr/local/bin; 218 | PRODUCT_NAME = PCOpenCVTest; 219 | }; 220 | name = Release; 221 | }; 222 | 1DEB928A08733DD80010E9CD /* Debug */ = { 223 | isa = XCBuildConfiguration; 224 | buildSettings = { 225 | ARCHS = "$(ARCHS_STANDARD_32_64_BIT)"; 226 | GCC_C_LANGUAGE_STANDARD = gnu99; 227 | GCC_OPTIMIZATION_LEVEL = 0; 228 | 
GCC_PREPROCESSOR_DEFINITIONS = _PC_COMPILE; 229 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 230 | GCC_WARN_UNUSED_VARIABLE = YES; 231 | HEADER_SEARCH_PATHS = /usr/local/include; 232 | ONLY_ACTIVE_ARCH = YES; 233 | PREBINDING = NO; 234 | SDKROOT = macosx10.6; 235 | }; 236 | name = Debug; 237 | }; 238 | 1DEB928B08733DD80010E9CD /* Release */ = { 239 | isa = XCBuildConfiguration; 240 | buildSettings = { 241 | ARCHS = "$(ARCHS_STANDARD_32_64_BIT)"; 242 | GCC_C_LANGUAGE_STANDARD = gnu99; 243 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 244 | GCC_WARN_UNUSED_VARIABLE = YES; 245 | PREBINDING = NO; 246 | SDKROOT = macosx10.6; 247 | }; 248 | name = Release; 249 | }; 250 | /* End XCBuildConfiguration section */ 251 | 252 | /* Begin XCConfigurationList section */ 253 | 1DEB928508733DD80010E9CD /* Build configuration list for PBXNativeTarget "PCOpenCVTest" */ = { 254 | isa = XCConfigurationList; 255 | buildConfigurations = ( 256 | 1DEB928608733DD80010E9CD /* Debug */, 257 | 1DEB928708733DD80010E9CD /* Release */, 258 | ); 259 | defaultConfigurationIsVisible = 0; 260 | defaultConfigurationName = Release; 261 | }; 262 | 1DEB928908733DD80010E9CD /* Build configuration list for PBXProject "PCOpenCVTest" */ = { 263 | isa = XCConfigurationList; 264 | buildConfigurations = ( 265 | 1DEB928A08733DD80010E9CD /* Debug */, 266 | 1DEB928B08733DD80010E9CD /* Release */, 267 | ); 268 | defaultConfigurationIsVisible = 0; 269 | defaultConfigurationName = Release; 270 | }; 271 | /* End XCConfigurationList section */ 272 | }; 273 | rootObject = 08FB7793FE84155DC02AAC07 /* Project object */; 274 | } 275 | -------------------------------------------------------------------------------- /jni/Detector.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Detector.cpp 3 | * 4 | * Created on: Aug 1, 2010 5 | * Author: roys 6 | */ 7 | 8 | #include "Detector.h" 9 | 10 | 11 | #include 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | 18 | using namespace std; 19 | using namespace cv; 20 | 21 | //void GetCandidatePoints(vector& _points, Mat& img, Mat& gray, Mat& hsv, bool redOrBlue) { 22 | // vector corners; 23 | // _points.clear(); 24 | // 25 | // Mat _tmp; img.copyTo(_tmp); 26 | // vector chns; split(_tmp, chns); 27 | // 28 | //// for (int cn=0; cn(corners[i]); 39 | // { //high saturation 40 | // stringstream ss; ss << "(" << (int)hsvv.val[0] << "," << (int)hsvv.val[1] << "," << (int)hsvv.val[2] << ")"; 41 | // putText(img, ss.str(), corners[i], CV_FONT_HERSHEY_PLAIN, 1.0, Scalar(255), 1); 42 | // int h = hsvv[0], s = hsvv[1], v = hsvv[2]; 43 | // if((cn == 0 && h > 110 && h < 130 && s > 100 && v > 200)|| //red channel 44 | // //(cn == 1 && hsvv[1] > 40 && hsvv[2] > 80 && hsvv[0] > 50 && hsvv[0] < 100)|| //green channel 45 | // (cn == 2 && (h < 15 || h > 170) && s > 70 && v > 150)|| //blue channel 46 | // false) { 47 | // circle(img, corners[i], 3, Scalar(0,255,0), 2); 48 | // 49 | // _points.push_back(corners[i]); 50 | // } 51 | // else { 52 | // circle(img, corners[i], 3, Scalar(0,255,255), 2); 53 | // } 54 | // } 55 | // } 56 | // } 57 | // stringstream ss; ss << "cn=" << cn << ",rOb="; 58 | // if(redOrBlue) ss << "red"; 59 | // else ss << "blue"; 60 | // putText(img,ss.str(),Point(10,15),CV_FONT_HERSHEY_PLAIN,1.0,Scalar(255,255),1); 61 | //} 62 | 63 | vector Detector::GetPointsUsingBlobs(vector& _points, Mat& img, Mat& hsv, bool get_all_blobs, int i_am, bool _debug) { 64 | _points.clear(); 65 | 66 | vector state(3); 67 | state[0] = state[1] = state[2] = 0; 68 | 69 | Mat blobmask; 
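// inRange() builds a binary mask: a pixel becomes 255 only if its H, S and
// V components all fall within the per-color bounds below (OpenCV stores
// hue in [0,180), and the 256 upper bounds are used so that 255 is
// included). findContours() further down then turns this mask into
// candidate blobs, which are filtered by area and by how closely they
// match a circle.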
70 | 71 | { 72 | if(i_am == IAM_BLUE) { 73 | inRange(hsv, Scalar(0,80,210), Scalar(37,256,256), blobmask); 74 | } else if (i_am == IAM_RED) { 75 | inRange(hsv, Scalar(85,45,100), Scalar(120,256,256), blobmask); 76 | } 77 | } 78 | 79 | // cvtColor(blobmask,img,CV_GRAY2RGB); 80 | 81 | //#ifdef _PC_COMPILE 82 | // imshow("blobmask",blobmask); 83 | //#endif 84 | 85 | vector > contours; 86 | { 87 | Mat __tmp; blobmask.copyTo(__tmp); 88 | findContours( __tmp, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE ); 89 | } 90 | 91 | state[0] = contours.size(); 92 | 93 | int idx = 0, largestComp = -1, secondlargest = -1; 94 | float maxarea = -1.0f, secondmaxarea = -1.0f; 95 | 96 | for (; idx& c = contours[idx]; 99 | float area = (float)(contourArea(Mat(c))); //TODO: add previous detected marker distance 100 | if(area < 100 || area > 1000) continue; 101 | state[1]++; 102 | 103 | int num = contours[idx].size(); 104 | Point* pts = &(contours[idx][0]); 105 | 106 | //make a "similar" circle to match to 107 | Scalar _mean = mean(Mat(contours[idx])); 108 | 109 | // circle(img, Point(_mean[0],_mean[1]),2,Scalar(0,0,255),1); 110 | Mat new_circlePts = Mat(circlepts) + _mean; 111 | // vector > _circlepts; _circlepts.push_back(circlepts); 112 | // drawContours(img,_circlepts,0,Scalar(0,255,0)); 113 | 114 | double ellipsematch = matchShapes(Mat(contours[idx]), new_circlePts, CV_CONTOURS_MATCH_I2, 0.0); 115 | if (ellipsematch > 0.2) { //this is just not a circle.. 116 | continue; 117 | } 118 | state[2]++; 119 | 120 | if(_debug) { 121 | fillPoly(img, (const Point**)(&pts), &num, 1, Scalar(255,255,0)); 122 | Vec3b hsvv = hsv.at(_mean[1],_mean[0]); 123 | stringstream ss; ss << "h " << (int)hsvv[0] << " s " << (int)hsvv[1] << " v " << (int)hsvv[2]; 124 | putText(img,ss.str(),Point(_mean[0],_mean[1]),CV_FONT_HERSHEY_PLAIN,1.0,Scalar(255,255),1); 125 | } 126 | 127 | // if(_debug) { 128 | // stringstream ss; ss << setprecision(3) << "a = " << area << ", e = " << ellipsematch; 129 | // putText(img,ss.str(),Point(_mean[0],_mean[1]),CV_FONT_HERSHEY_PLAIN,1.0,Scalar(255,255),1); 130 | // } 131 | 132 | if(get_all_blobs) { 133 | _points.push_back(Point(_mean[0],_mean[1])); 134 | continue; 135 | } 136 | 137 | area = area / ellipsematch; 138 | 139 | if(area > maxarea) { //largest overthrown 140 | secondlargest = largestComp; 141 | secondmaxarea = maxarea; 142 | largestComp = idx; 143 | maxarea = area; 144 | } else if(area > secondmaxarea) { //second largest overthrown 145 | secondlargest = idx; 146 | secondmaxarea = area; 147 | } 148 | } 149 | if (get_all_blobs) { //skip getting only the top two 150 | return state; 151 | } 152 | for (int i=0; i=2 && selfCharacter.size()>=2) { 168 | return norm(Vec2i(otherCharacter[0]-otherCharacter[1]))/norm(Vec2i(selfCharacter[0]-selfCharacter[1])); 169 | } else { 170 | return 1.0; 171 | } 172 | } 173 | 174 | void Detector::TrackPoints(Rect markers[], bool _debug) { 175 | Rect trackWindow1 = markers[0],trackWindow2 = markers[1]; 176 | 177 | //Create mask out of pixels in HSV value range 178 | inRange(hsv, Scalar(0, 10, 80), 179 | // V V V 180 | Scalar(180, 256, 256), 181 | trackMask); 182 | //imshow("trackmask",trackMask); 183 | 184 | //Get only hue channel 185 | int ch[] = {0, 0}; 186 | if(!hue.data) 187 | hue.create(hsv.size(), hsv.depth()); 188 | mixChannels(&hsv, 1, &hue, 1, ch, 1); 189 | 190 | //New object selection - calculate new histogram 191 | if( this->trackObject < 0 ) 192 | { 193 | cout << "NEW HISTOGRAM" << endl; 194 | //Get histogram over hue channel 195 | // int histchannels[] = 
{1}; 196 | Mat roi(hsv, trackWindow1), maskroi(trackMask, trackWindow1); 197 | calcHist(&roi, 1, 0, maskroi, hist, 1, hsize, phranges); 198 | 199 | //Histogram of other candidate marker 200 | Mat hist1; 201 | Mat roi1(hsv, trackWindow2), maskroi1(trackMask, trackWindow2); 202 | 203 | calcHist(&roi1, 1, 0, maskroi1, hist1, 1, hsize, phranges); 204 | 205 | hist = hist + hist1; //combine histograms 206 | 207 | normalize(hist, hist, 0.0, 1.0, CV_MINMAX); 208 | 209 | // //Shift 8 cells of the histogram to the left, because red values are split on the 180 value line 210 | // Mat_ tmp(hist.size()); 211 | // for (int i=0; i(hist.rows/2+i); 213 | // tmp(0,hist.rows/2+i) = hist.at(i); 214 | // } 215 | // for (int i=0; i(i) = Vec3b(saturate_cast(i*180./hsize[0]), 255, 255); 228 | // cvtColor(buf, buf, CV_HSV2BGR); 229 | // 230 | // for( int i = 0; i < hsize[0]; i++ ) 231 | // { 232 | // int val = saturate_cast(hist.at(i)*histimg.rows); 233 | // rectangle( histimg, Point(i*binW,histimg.rows), 234 | // Point((i+1)*binW,histimg.rows - val), 235 | // Scalar(buf.at(i)), -1, 8 ); 236 | // } 237 | // } 238 | #endif 239 | 240 | //calculate variance of histogram, and this will be the measure to how good it is 241 | //low variance = a good capture of the color = a good location of the marker 242 | Scalar _mean,_stddev; 243 | // 244 | // //multiply histogram count by value to get E[X] (mean) 245 | // vector mults; 246 | // { 247 | // float step_ = 180.0f / (float)hist.rows; 248 | // for (int i=0; i 90); 264 | 265 | meanStdDev(both_8SC, _mean, _stddev); 266 | 267 | #ifdef _PC_COMPILE 268 | { 269 | stringstream ss; ss << "Stdv = " << _stddev[0]; 270 | putText(histimg, ss.str(), Point(10,10), CV_FONT_HERSHEY_PLAIN, 1.0, Scalar(255), 2); 271 | line(histimg, Point((_mean[0]+90)*histimg.cols/180.0,0), Point((_mean[0]+90)*histimg.cols/180.0,histimg.rows), Scalar(255), 4); 272 | imshow( "Histogram", histimg ); 273 | } 274 | #endif 275 | 276 | if (_stddev[0] > 20) { //std.deviation too high for a coherent marker 277 | this->trackObject = -1; 278 | this->tracking = false; 279 | if(_debug) { cout << "HISTOGRAM NOT COHERENT" << endl; } 280 | return; 281 | } else { 282 | this->trackObject = 1; //all good, Begin tracking 283 | } 284 | } 285 | 286 | //Calc histogram back-projection (can be shared between 2 markers, as they have the same color..) 
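// calcBackProject() replaces every pixel with the value of the
// hue-histogram bin it falls into, producing a per-pixel likelihood map of
// "how marker-colored is this pixel". CamShift() then iteratively shifts
// (and resizes) each track window toward the densest region of that map,
// which is how the two marker windows follow the markers between frames.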
287 | calcBackProject(&hsv, 1, 0, hist, backproj, phranges); 288 | backproj &= trackMask; 289 | // imshow("backproj",backproj); 290 | 291 | //Track object on back-projection 292 | RotatedRect trackBox1 = CamShift(backproj, trackWindow1, TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 )); 293 | RotatedRect trackBox2 = CamShift(backproj, trackWindow2, TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 )); 294 | 295 | if( trackWindow1.area() <= 1 || trackWindow2.area() <= 1) { 296 | this->tracking = false; 297 | this->trackObject = -1; 298 | cout << "LOST A MARKER" << endl; 299 | return; 300 | } 301 | // { 302 | // int cols = backproj.cols, rows = backproj.rows, r = (MIN(cols, rows) + 5)/6; 303 | // trackWindow = Rect(trackWindow.x - r, trackWindow.y - r, 304 | // trackWindow.x + r, trackWindow.y + r) & 305 | // Rect(0, 0, cols, rows); 306 | // } 307 | 308 | // if( backprojMode ) 309 | // cvtColor( backproj, image, CV_GRAY2BGR ); 310 | // ellipse( img, trackBox1, Scalar(0,0,255), 3, CV_AA ); 311 | // ellipse( img, trackBox2, Scalar(0,0,255), 3, CV_AA ); 312 | 313 | this->otherCharacter[0] = trackBox1.center; 314 | this->otherCharacter[1] = trackBox2.center; 315 | 316 | cout << "TRACKING OK" << endl; 317 | } 318 | 319 | void Detector::KalmanSmooth() { 320 | for (int i=0; i<2; i++) { 321 | Mat prediction = KF[i].predict(); 322 | Point predictPt(prediction.at(0),prediction.at(1)); 323 | this->measurement.at(0) = this->otherCharacter[i]; 324 | // Point2f* _ptr = &((this->measurement).at(0)); 325 | // *_ptr = this->otherCharacter[i]; 326 | // this->measurement(1) = this->otherCharacter[0].y; 327 | 328 | //Point measPt(measurement(0),measurement(1)); 329 | //mousev.push_back(measPt); 330 | // generate measurement 331 | //measurement += KF.measurementMatrix*state; 332 | 333 | Mat estimated = KF[i].correct(measurement); 334 | Point statePt(estimated.at(0),estimated.at(1)); 335 | //kalmanv.push_back(statePt); 336 | 337 | //TODO: if error is very high - get out of tracking mode 338 | 339 | this->otherCharacter[i] = statePt; 340 | } 341 | } 342 | 343 | //Experimental.... 
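// The calibration routines below reuse the blob detector:
// calibrateSelfCharacter() walks a small state machine (no markers found ->
// ask for the extra marker -> found, or give up after a few frames) and,
// once the extra marker pins down the character's orientation, records the
// character-to-world angle in character_to_world_ang.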
344 | vector<int> Detector::calibrateOtherCharacter(Mat& _img, int i_am, bool _flip, bool _debug) { 345 | vector<int> state(1,-1); 346 | if(!_img.data) return state; 347 | 348 | setupImages(_img,_flip); 349 | 350 | vector<Point> pts; 351 | vector<int> blob_state = GetPointsUsingBlobs(pts, img, hsv, false, i_am, _debug); 352 | calib_history.push_back(pts); 353 | 354 | if(calib_history.size() > 5) { 355 | //get mean and standard deviation of last few iterations 356 | Scalar mean,stdv; 357 | meanStdDev(Mat(calib_history), mean, stdv); 358 | 359 | cout << "mean " << mean.val[0] << " stdv " << stdv.val[0] << endl; 360 | } 361 | 362 | return state; 363 | } 364 | 365 | vector<int> Detector::calibrateSelfCharacter(Mat& _img, int i_am, bool _flip, bool _debug) { 366 | vector<int> state(4); 367 | state[0] = state[1] = state[2] = state[3] = -1; 368 | 369 | if(!_img.data) return state; 370 | 371 | setupImages(_img,_flip); 372 | 373 | if(calibration_state == CALIBRATE_NO_MARKERS_FOUND) { 374 | //self localization, look for self markers 375 | vector<int> blobs_state = GetPointsUsingBlobs(selfCharacter, img, hsv, false, (i_am==IAM_RED)?IAM_BLUE:IAM_RED, _debug); 376 | state[1] = blobs_state[0]; state[2] = blobs_state[1]; state[3] = blobs_state[2]; 377 | } 378 | 379 | if (selfCharacter.size() == 2) { 380 | if(calibration_state == CALIBRATE_NO_MARKERS_FOUND) { 381 | look_for_extra_marker_count = 0; 382 | calibration_state = CALIBRATE_SEND_EXTRA_MARKER; 383 | } else if (calibration_state == CALIBRATE_SEND_EXTRA_MARKER || calibration_state == CALIBRATE_NO_EXTRA_MARKER_FOUND) { 384 | look_for_extra_marker_count++; 385 | if (FindExtraMarkerUsingBlobs(i_am)) { 386 | //extra marker found -> position of self markers found 387 | calibration_state = CALIBRATE_FOUND; 388 | 389 | //compute angle between world y-axis and character 390 | Vec2f y(0,1); 391 | Vec2f c = Point2Vec2f(selfCharacter[0]-selfCharacter[1]); 392 | Vec2f cn = c * (1.0f/norm(c)); //normalize 393 | if (cn.dot(Vec2f(1,0)) < 0) { //if pointing down - flip 394 | cn = -cn; 395 | } 396 | character_to_world_ang = cn.dot(y) - atan2f(3.0f, 4.0f); 397 | } else { 398 | //Give it a few frames to look for the marker before giving up 399 | calibration_state = CALIBRATE_NO_EXTRA_MARKER_FOUND; 400 | if (look_for_extra_marker_count > 5) { 401 | calibration_state = CALIBRATE_NO_MARKERS_FOUND; //ok give up 402 | } 403 | } 404 | } 405 | } else //not enough points to start calibration 406 | calibration_state = CALIBRATE_NO_MARKERS_FOUND; 407 | 408 | img.copyTo(_img); 409 | // int fromTo[] = {0,0, 1,1, 2,2}; 410 | // mixChannels(&img, 1, &_img, 1, fromTo, 3); 411 | state[0] = calibration_state; 412 | 413 | return state; 414 | } 415 | 416 | /** 417 | * Returns 4 points: the first 2 are the other character, the second 2 are the self character 418 | */ 419 | //#ifndef _PC_COMPILE 420 | //bool Detector::findCharacter(int idx, image_pool* pool, int i_am, bool _flip, bool _debug) { 421 | // Mat _img = pool->getImage(idx), 422 | //#else 423 | vector<int> Detector::findCharacter(Mat& _img, int i_am, bool _flip, bool _debug) { 424 | vector<int> state(4,-1); 425 | 426 | if(!_img.data) return state; 427 | 428 | setupImages(_img,_flip); 429 | 430 | if(!tracking) { 431 | //Initialize position of markers 432 | vector<int> blobs_state = GetPointsUsingBlobs(otherCharacter, img, hsv, false, i_am, _debug); 433 | state[1] = blobs_state[0]; 434 | state[2] = blobs_state[1]; 435 | state[3] = blobs_state[2]; 436 | tracking = otherCharacter.size() >= 2; 437 | #ifdef _PC_COMPILE 438 | cout << "BLOB DETECT: " << state[1] << "," << state[2] << "," << state[3] << endl; 439 
| if(tracking) 440 | cout << "BEGIN TRACKING" << endl; 441 | #endif 442 | } 443 | 444 | tracking = false; //NOTE: this unconditionally re-disables tracking, so the CamShift branch below never runs (it looks like a debugging leftover) 445 | 446 | if (tracking) { 447 | //Track position of markers 448 | Rect markers[2] = { Rect(this->otherCharacter[0]-Point(10,10),Size(20,20)), 449 | Rect(this->otherCharacter[1]-Point(10,10),Size(20,20))}; 450 | // rectangle(img, markers[0], Scalar(255), 2); 451 | // rectangle(img, markers[1], Scalar(255), 2); 452 | TrackPoints(markers, _debug); //this->tracking may change here 453 | 454 | //TODO: check if tracking died, converged to one point, or OK 455 | if (norm(this->otherCharacter[0] - this->otherCharacter[1]) < 100) { 456 | //seems like the markers degenerated 457 | tracking = false; 458 | } 459 | } 460 | 461 | 462 | #define DRAW_CROSS(img,pt) line(img,pt-Point(5,0),pt+Point(5,0),Scalar(0,255,0),2); \ 463 | line(img,pt-Point(0,5),pt+Point(0,5),Scalar(0,255,0),2); 464 | 465 | if (otherCharacter.size() >= 2) { 466 | //Kalman filter to smooth position of markers 467 | if(!kalman_setup) setupKalmanFilter(); 468 | KalmanSmooth(); 469 | 470 | //look for extra marker 471 | other_extra_marker_found = FindExtraMarker(otherCharacter); 472 | 473 | if(_debug) { 474 | DRAW_CROSS(img,this->otherCharacter[0]) 475 | DRAW_CROSS(img,this->otherCharacter[1]) 476 | } 477 | 478 | state[0] = 1; 479 | } 480 | 481 | if (// both characters visible 482 | otherCharacter.size()>=2 && selfCharacter.size()>=2 && 483 | // good vertical alignment 484 | fabs(getSelfCenter().y - getOtherCenter().y) < 10.0f) { 485 | //increase alignment timer 486 | waveTimer = MIN(waveTimer + 1,30); 487 | 488 | } else { 489 | waveTimer = MAX(0,waveTimer - 1); 490 | } 491 | 492 | 493 | if(_debug) { 494 | if (shouldResize) { 495 | resize(img,_img,_img.size()); //so we'll have some feedback on screen 496 | } else { 497 | img.copyTo(_img); 498 | } 499 | } 500 | 501 | return state; 502 | } 503 | 504 | bool Detector::FindExtraMarkerUsingBlobs(int i_am) { 505 | vector<Point> blobs; 506 | //get all the well-colored, well-shaped blobs 507 | GetPointsUsingBlobs(blobs, img, hsv, true, (i_am==IAM_RED)?IAM_BLUE:IAM_RED, false); 508 | 509 | if (blobs.size() != 3) { 510 | return false; //we can only work if we find exactly 3 blobs.. 
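// With exactly three blobs, the two body markers plus the extra marker
// should form a right angle at the middle blob; the loop below tries all
// three cyclic orderings, accepts the one whose legs are (nearly)
// perpendicular, and takes the longer leg as the character's own marker
// pair.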
511 | } 512 | 513 | //look for 90-degree angle between the three 514 | //there can be three configurations: 1-2-3, 2-3-1, 3-1-2 515 | for (int i=0; i<3; i++) { 516 | Vec2f a = Point2Vec2f(blobs[i] - blobs[(i+1)%3]); 517 | float na = norm(a); 518 | Vec2f an = a * (1.0f / na); 519 | Vec2f b = Point2Vec2f(blobs[(i+1)%3] - blobs[(i+2)%3]); 520 | float nb = norm(b); 521 | Vec2f bn = b * (1.0f / nb); 522 | 523 | #ifdef _PC_COMPILE 524 | Mat tmp; img.copyTo(tmp); 525 | line(tmp, blobs[i], blobs[(i+1)%3], Scalar(255), 2); 526 | line(tmp,blobs[(i+1)%3],blobs[(i+2)%3],Scalar(0,255),2); 527 | 528 | stringstream ss; ss<<"abs(dotp): "< nb) { 544 | selfCharacter[0] = blobs[i]; 545 | selfCharacter[1] = blobs[(i+1)%3]; 546 | } else { 547 | selfCharacter[0] = blobs[(i+1)%3]; 548 | selfCharacter[1] = blobs[(i+2)%3]; 549 | } 550 | 551 | return true; 552 | } 553 | } 554 | return false; 555 | } 556 | 557 | bool Detector::FindExtraMarker(vector& pts) { 558 | Vec2f pa = Point2Vec2f(pts[0]) - Point2Vec2f(pts[1]); //principle_axis 559 | float angle = atan2(3.0, 4.0); //the angle between the diagonal and length 560 | //get the vector from the upper marker to the place of the extra-marker 561 | Vec2f rotated_upper(pa[0]*cos(angle)+pa[1]*(-sin(angle)), pa[0]*sin(angle)+pa[1]*cos(angle)); 562 | rotated_upper = rotated_upper * 0.8; // 4/5 is the ratio between the diagonal and the length of the rectangle 563 | 564 | Point extraMarkerPoint = pts[1]+Vec2f2Point(rotated_upper); 565 | if(!extraMarkerPoint.inside(Rect(0,0,img.cols,img.rows))) return false; 566 | 567 | #ifdef _PC_COMPILE 568 | // line(img, otherCharacter[0], otherCharacter[0]+Vec2f2Point(rotated_lower), Scalar(0,0,255), 2); 569 | // line(img, otherCharacter[1], extraMarkerPoint, Scalar(0,0,255), 2); 570 | // line(img, otherCharacter[0], extraMarkerPoint, Scalar(0,0,255), 2); 571 | circle(img, extraMarkerPoint, 10, Scalar(255), 1); 572 | #endif 573 | 574 | //compare histogram of colors within this area to histogram of known marker color 575 | Mat _hist; 576 | if(!hue.data) return false; //images not setup properly 577 | 578 | Mat roi = hsv(Rect(extraMarkerPoint.x-10,extraMarkerPoint.y-10,20,20)&Rect(0,0,hsv.cols,hsv.rows)); 579 | // Mat maskRoi = Mat::ones(roi.size(),CV_8UC1) * 255; 580 | int channels[3] = {1,1,1}; 581 | calcHist(&roi, 1, channels, Mat(), _hist, 3, hsize, phranges); 582 | 583 | roi = hsv(Rect(pts[0].x-10,pts[0].y-10,20,20)&Rect(0,0,hsv.cols,hsv.rows)); 584 | calcHist(&roi, 1, channels, Mat(), hist, 3, hsize, phranges); 585 | 586 | double chisqr_test = compareHist(_hist, hist, CV_COMP_CHISQR); 587 | bool extra_marker_found = (chisqr_test < 50.0); 588 | 589 | #ifdef _PC_COMPILE 590 | if (extra_marker_found) { 591 | putText(img, "EXTRA MARKER", Point(20,20), CV_FONT_HERSHEY_PLAIN, 2.0, Scalar(255), 2); 592 | } 593 | #endif 594 | 595 | return extra_marker_found; 596 | } 597 | 598 | /* 599 | void _GetCandidatePoints(vector& points, Mat& img, Mat& gray, Mat& hsv) { 600 | vector corners; 601 | points.clear(); 602 | 603 | goodFeaturesToTrack(gray, corners, 150, 0.01, 50.0); 604 | for (int i=0; i(corners[i]); 606 | if(hsvv[2] > 200 && hsvv[1] < 50) { //lightly saturated and high value 607 | circle(img, corners[i], 3, Scalar(0,255,0), 2); 608 | stringstream ss; ss << "(" << (int)hsvv.val[0] << "," << (int)hsvv.val[1] << "," << (int)hsvv.val[2] << ")"; 609 | putText(img, ss.str(), corners[i], CV_FONT_HERSHEY_PLAIN, 1.0, Scalar(255), 1); 610 | 611 | points.push_back(corners[i]); 612 | } 613 | } 614 | } 615 | 616 | Point2d Detector::_findCharacter(int idx, 
image_pool* pool, bool _flip, bool _debug) { 617 | Mat *_img = pool->getImage(idx), 618 | img, 619 | gray, 620 | hsv; 621 | 622 | if(!_img) return Point2d(-1,-1); 623 | 624 | resize(*_img,img,Size(),0.5,0.5); 625 | 626 | //rotate 90 degrees CCW 627 | double angle = -90.0; 628 | Point2f src_center(img.rows/2.0, img.rows/2.0); 629 | Mat rot_mat = getRotationMatrix2D(src_center, angle, 1.0); 630 | Mat dst; 631 | warpAffine(img, dst, rot_mat, Size(img.rows,img.cols)); 632 | if(_flip) flip(dst,dst,0); 633 | dst.copyTo(img); 634 | 635 | cvtColor(img, gray, CV_RGB2GRAY); 636 | cvtColor(img, hsv, CV_BGR2HSV); 637 | 638 | GaussianBlur(gray, gray, Size(7,7), 3.0); 639 | 640 | vector points; 641 | GetCandidatePoints(points, img, gray, hsv); 642 | 643 | resize(img,dst,_img->size()); //so we'll have some feedback on screen 644 | dst.copyTo(*_img); 645 | 646 | if (points.size() >= 3) { 647 | //TODO: see that these are the points we are looking for 648 | 649 | //get center point 650 | Scalar s = mean(Mat(points)); 651 | return Point2d(s[0]/img.cols,s[1]/img.rows); 652 | } else { 653 | return Point2d(-1,-1); 654 | } 655 | } 656 | */ -------------------------------------------------------------------------------- /src/edu/mit/media/fluid/royshil/headfollower/HeadFollower.java: -------------------------------------------------------------------------------- 1 | package edu.mit.media.fluid.royshil.headfollower; 2 | 3 | import android.app.Activity; 4 | import android.app.AlertDialog; 5 | import android.app.Dialog; 6 | import android.content.DialogInterface; 7 | import android.graphics.Color; 8 | import android.graphics.drawable.GradientDrawable; 9 | import android.os.Bundle; 10 | import android.util.Log; 11 | import android.view.Gravity; 12 | import android.view.Menu; 13 | import android.view.MenuInflater; 14 | import android.view.MenuItem; 15 | import android.view.View; 16 | import android.view.Window; 17 | import android.view.WindowManager; 18 | import android.widget.ImageView; 19 | import android.widget.RelativeLayout; 20 | import android.widget.SeekBar; 21 | import android.widget.SeekBar.OnSeekBarChangeListener; 22 | import android.widget.Toast; 23 | import edu.mit.media.fluid.royshil.graphics.InteractionBar; 24 | import edu.mit.media.fluid.royshil.graphics.MyAnimations; 25 | import edu.mit.media.fluid.royshil.graphics.MyAnimations.Character; 26 | import edu.mit.media.fluid.royshil.graphics.MyCanvasView; 27 | 28 | public class HeadFollower extends Activity implements android.view.View.OnClickListener, OnSeekBarChangeListener, ICharacterStateHandler, IMarkerShower { 29 | private static final String TAG = "HeadFollower"; 30 | private static final int SETTINGS_DIALOG = 99; 31 | 32 | public boolean mDebug; 33 | public boolean mFlip; 34 | public boolean mOpenCV = false; 35 | public boolean mRedOrBlue = true; 36 | public boolean mTouchToColor = true; 37 | 38 | private boolean mLookingToTheRight = true; //true == looking right, false == looking left 39 | 40 | Character currentCharacter = MyAnimations.Character.RED; 41 | 42 | // private SurfaceView mCharPreview; 43 | // private SurfaceHolder holder; 44 | // private Bundle extras; 45 | // private MediaPlayer mMediaPlayer; 46 | // private boolean mIsVideoReadyToBePlayed; 47 | 48 | @Override 49 | public boolean onCreateOptionsMenu(Menu menu) { 50 | MenuInflater inflater = getMenuInflater(); 51 | inflater.inflate(R.menu.options_menu, menu); 52 | return true; 53 | } 54 | 55 | @Override 56 | public boolean onOptionsItemSelected(MenuItem item) { 57 | switch 
(item.getItemId()) { 58 | case R.id.showOpenCV: 59 | toggleOpenCV(); 60 | return true; 61 | // case R.id.showsensors_item: 62 | // Intent myIntent = new Intent(); 63 | // myIntent.setClassName("edu.mit.media.fluid.royshil.headfollower", "edu.mit.media.fluid.royshil.headfollower.Sensors"); 64 | // startActivity(myIntent); 65 | // return true; 66 | case R.id.quit_item: 67 | finish(); 68 | return true; 69 | case R.id.toggleFlip_item: 70 | mFlip = !mFlip; 71 | return true; 72 | case R.id.showDebugParts: 73 | mDebug = !mDebug; 74 | return true; 75 | case R.id.openSettings_itm: 76 | showDialog(SETTINGS_DIALOG); 77 | return true; 78 | case R.id.lookLtoR_item: 79 | showChooseAnimDialog(); 80 | return true; 81 | default: 82 | return super.onOptionsItemSelected(item); 83 | } 84 | } 85 | 86 | private void toggleOpenCV() { 87 | RelativeLayout rl = (RelativeLayout)findViewById(R.id.charactercenterview); 88 | if(mOpenCV) { //remove OpenCV view 89 | View calibration_or_character = findViewById( 90 | cview.getCurrentState() == CharacterTrackerView.State.CALIBRATING_NO_MARKERS_FOUND ? R.id.calibration_text_background : R.id.mycanvas 91 | ); //if calibrating, bring the calibration text forward, else bring character 92 | rl.bringChildToFront(calibration_or_character); 93 | mOpenCV = false; 94 | } else { //install OpenCV view 95 | rl.bringChildToFront(findViewById(R.id.drawtracker)); 96 | mOpenCV = true; 97 | } 98 | rl.invalidate(); 99 | } 100 | 101 | @Override 102 | protected Dialog onCreateDialog(int id) { 103 | if(id==SETTINGS_DIALOG) { 104 | AlertDialog.Builder builder = new AlertDialog.Builder(this); 105 | builder.setTitle("ON/OFF Settings"); 106 | 107 | final String touch_to_color = "Touch to color"; 108 | final String red_markers = "Red markers"; 109 | final String[] items = new String[] {touch_to_color, red_markers}; 110 | boolean[] checked = new boolean[] {mTouchToColor,mRedOrBlue}; 111 | 112 | builder.setMultiChoiceItems(items, checked, new DialogInterface.OnMultiChoiceClickListener() { 113 | @Override 114 | public void onClick(DialogInterface dialog, int which, boolean isChecked) { 115 | if(items[which].equals(touch_to_color)) mTouchToColor = isChecked; 116 | if(items[which].equals(red_markers)) { mRedOrBlue = isChecked; setRedBlue(); } 117 | } 118 | }); 119 | 120 | return builder.create(); 121 | } 122 | return null; 123 | } 124 | 125 | private void flipRedBlue() { 126 | mRedOrBlue = !mRedOrBlue; 127 | cview.setI_am(mRedOrBlue ? 2 : 1); //RED = 2, BLUE = 1 128 | setRedBlue(); 129 | } 130 | 131 | private void setRedBlue() { 132 | ImageView imageView = (ImageView)findViewById(R.id.bl_circle); 133 | ImageView imageView3 = (ImageView)findViewById(R.id.tr_circle); 134 | if(!mRedOrBlue) { 135 | imageView.setBackgroundResource(R.drawable.blue_markercircle); 136 | imageView3.setBackgroundResource(R.drawable.blue_markercircle); 137 | } else { 138 | imageView.setBackgroundResource(R.drawable.red_markercircle); 139 | imageView3.setBackgroundResource(R.drawable.red_markercircle); 140 | } 141 | imageView.postInvalidate(); 142 | imageView3.postInvalidate(); 143 | 144 | mLookingToTheRight = true;//looking right. 145 | 146 | currentCharacter = (mRedOrBlue) ? Character.RED : Character.BLUE; 147 | 148 | mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.NATURAL, currentCharacter), false); 149 | } 150 | 151 | /** Called when the activity is first created. 
151 |   /** Called when the activity is first created. */
152 |   @Override
153 |   public void onCreate(Bundle savedInstanceState) {
154 |     super.onCreate(savedInstanceState);
155 |     requestWindowFeature(Window.FEATURE_NO_TITLE);
156 |     getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
157 | 
158 |     setContentView(R.layout.main);
159 | 
160 |     // TransformableImageView headImgView = (TransformableImageView)findViewById(R.id.head_img);
161 |     // headImgView.setOnClickListener(this);
162 | 
163 | 
164 |     // initOpenCVViews();
165 | 
166 |     // ((SeekBar)findViewById(R.id.hueSeek)).setOnSeekBarChangeListener(this);
167 |     //
168 |     // float hsv[] = new float[3];
169 |     // android.graphics.Color.RGBToHSV(255, 0, 0, hsv);
170 |     // ((SeekBar)findViewById(R.id.hueSeek)).setProgress((int)hsv[0]);
171 | 
172 |     findViewById(R.id.tr_circle).setOnClickListener(this);
173 |     findViewById(R.id.bl_circle).setOnClickListener(this);
174 | 
175 |     // WebView wb = (WebView) findViewById(R.id.webview);
176 |     //// wb.setOnClickListener(this);
177 |     // wb.setBackgroundColor(0);
178 |     //// wb.loadDataWithBaseURL("fake://dagnabbit",
179 |     //// "",
180 |     //// "text/html",
181 |     //// "UTF-8",
182 |     //// "fake://lala");
183 |     //// fireAnimation(getAnimationsIndex().get(Animations.NATURAL),false);
184 |     //// wb.loadUrl("file:///android_asset/animate.html?anim_file=girlshake0&first=160&last=235");
185 |     //
186 |     // WebSettings webSettings = wb.getSettings();
187 |     // webSettings.setJavaScriptEnabled(true);
188 |     // webSettings.setSupportZoom(false);
189 | 
190 |     mcv = (MyCanvasView) findViewById(R.id.mycanvas);
191 |     mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.NATURAL, currentCharacter), false);
192 | 
193 |     cview = (CharacterTrackerView) findViewById(R.id.charactertracker);
194 |     BitmapDrawerSurfaceView dtv = (BitmapDrawerSurfaceView)findViewById(R.id.drawtracker);
195 |     cview.setBitmapHolder(dtv);
196 |     cview.setmStateHandler(this);
197 |     cview.setmMarkerShower(this);
198 | 
199 |     RelativeLayout rl = (RelativeLayout)findViewById(R.id.mainFrameLayout);
200 |     rl.bringChildToFront(findViewById(R.id.charcterView));
201 | 
202 |     // mCharPreview = (SurfaceView) findViewById(R.id.mysurfaceview);
203 |     // holder = mCharPreview.getHolder();
204 |     // holder.addCallback(this);
205 |     //// holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
206 |     // extras = getIntent().getExtras();
207 |     //
208 |     // mMediaPlayer = new MediaPlayer();
209 |     // mMediaPlayer.setDisplay(holder);
210 |     // mMediaPlayer.setOnCompletionListener(this);
211 |     // mMediaPlayer.setOnPreparedListener(this);
212 |     //
213 |     // mMediaPlayer.setOnBufferingUpdateListener(this);
214 |     // mMediaPlayer.setOnVideoSizeChangedListener(this);
215 |     // mMediaPlayer.setAudioStreamType(AudioManager.);
216 | 
217 |   }
218 | 
219 |   // private void initOpenCVViews() {
220 |   //   // Create our Preview view and set it as the content of our activity.
221 |   //   mPreview = new NativePreviewer(getApplication(), 640, 480);
222 |   //
223 |   //   // RotateAnimation rotateAnimation = new RotateAnimation(0.0f, 90.0f,
224 |   //   // getWindowManager().getDefaultDisplay().getHeight() /2.0f,
225 |   //   // getWindowManager().getDefaultDisplay().getWidth() / 2.0f);
226 |   //   // rotateAnimation.setFillAfter(true);
227 |   //   // rotateAnimation.setDuration(1000);
228 |   //   // mPreview.setAnimation(rotateAnimation);
229 |   //
230 |   //   LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,LayoutParams.WRAP_CONTENT);
231 |   //   // params.height = getWindowManager().getDefaultDisplay().getHeight();
232 |   //   // params.width = (int) (params.height * 4.0 / 2.88);
233 |   //
234 |   //   LinearLayout vidlay = new LinearLayout(getApplication());
235 |   //
236 |   //   vidlay.setGravity(Gravity.CENTER);
237 |   //   vidlay.addView(mPreview, params);
238 |   //
239 |   //   // make the glview overlay on top of the video preview
240 |   //   mPreview.setZOrderMediaOverlay(false);
241 |   //
242 |   //   // RelativeLayout relativeLayout = (RelativeLayout)
243 |   //   // findViewById(R.id.mainFrameLayout);
244 |   //   // relativeLayout.addView(vidlay);
245 |   //
246 |   //   glview = new GL2CameraViewer(getApplication(), false, 0, 0);
247 |   //   glview.setZOrderMediaOverlay(true);
248 |   //   glview.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT,LayoutParams.WRAP_CONTENT));
249 |   //
250 |   //   // relativeLayout.bringChildToFront(findViewById(R.id.crossandtext));
251 |   //
252 |   //   LinkedList defaultcallbackstack = new LinkedList();
253 |   //   defaultcallbackstack.addFirst(glview.getDrawCallback());
254 |   ////   defaultcallbackstack.addFirst(new CharacterProcessor());
255 |   //   mPreview.addCallbackStack(defaultcallbackstack);
256 |   //
257 |   //   RelativeLayout rl = (RelativeLayout)findViewById(R.id.mainFrameLayout);
258 |   ////   rl.addView(vidlay);
259 |   ////   rl.addView(glview);
260 |   //   rl.bringChildToFront(findViewById(R.id.charcterView));
261 |   // }
262 | 
263 |   // @Override
264 |   // protected void onPause() {
265 |   //   super.onPause();
266 |   //   mPreview.onPause();
267 |   //   glview.onPause();
268 |   // }
269 |   //
270 |   // @Override
271 |   // protected void onResume() {
272 |   //   super.onResume();
273 |   //   glview.onResume();
274 |   //   mPreview.onResume();
275 |   // }
276 |   //
277 |   //// class CharacterProcessor implements NativeProcessor.PoolCallback {
278 |   // @Override
279 |   // public void process(int idx, image_pool pool, long timestamp, NativeProcessor nativeProcessor) {
280 |   //   if(processor.findCharacter(idx, pool, (mRedOrBlue)?1:2, mFlip, mDebug)) {
281 |   //     //found friend
282 |   //
283 |   //     //adjust size
284 |   ////     WebView wb = (WebView) findViewById(R.id.webview);
285 |   ////     wb.loadUrl("javascript:document.getElementById('im').style.webkitTransform='scaleX(" + (float) processor.getSizeOfSelf() + ") scaleY(" + (float) processor.getSizeOfSelf() + ")';");
286 |   //
287 |   ////     final TransformableImageView transformableImageView = (TransformableImageView)findViewById(R.id.head_img);
288 |   ////     transformableImageView.post(new Runnable() {
289 |   ////       @Override
290 |   ////       public void run() {
291 |   ////         transformableImageView.scale = (float) processor.getSizeOfSelf();
292 |   ////         transformableImageView.invalidate();
293 |   ////       }
294 |   ////     });
295 |   //
296 |   //     //look in the right direction
297 |   //     if ( processor.getPtX(processor.getOtherCenter()) > processor.getPtX(processor.getSelfCenter())) {
298 |   //       if(!mLooking) toggleLooking();
299 |   //     } else {
300 |   //       if(mLooking) toggleLooking();
301 |   //     }
302 |   //
303 |   ////     TransformableImageView headImg = (TransformableImageView) findViewById(R.id.head_img);
304 |   ////     AnimationDrawable anim = (AnimationDrawable) headImg.getDrawable();
305 |   ////     anim.start();
306 |   ////     boolean doneTurning = !anim.isRunning();
307 |   //     boolean doneTurning = true;
308 |   //
309 |   //     boolean waveTimerDue = processor.getWaveTimer() > 15;
310 |   //
311 |   //     if(doneTurning && waveTimerDue) {
312 |   ////       fireAnimation(getAnimationsIndex().get(Animations.WAVE),false);
313 |   //       mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.WAVE, MyAnimations.Character.BLUE), false);
314 |   //     }
315 |   //   }
316 |   // }
317 |   // }
318 | 
319 |   private void toggleLooking() {
320 |     mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.TURN, currentCharacter), true);
321 |   }
322 | 
323 |   private MyCanvasView mcv;
324 |   private CharacterTrackerView cview;
325 |   protected int farInteractionCounter = 0;
326 |   protected int closeInteractionCounter = 0;
327 |   protected int nonInteractionCount = 0;
328 | 
329 |   @Override
330 |   public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
331 |     // if(fromUser && seekBar.getTag().equals("hueSeek")) {
332 |     //   int rgbi = Color.HSVToColor(new float[] {(float)progress,1.0f,1.0f});
333 |     ////   ((TextView)findViewById(R.id.debugTxt)).setText("rgb: " + (rgbi & 0x000000ff) + "," + (rgbi >> 8 & 0x000000ff) + "," + (rgbi >> 16 & 0x000000ff));
334 |     // }
335 |   }
336 |   @Override
337 |   public void onClick(View v) {
338 |     if(v.getId() == R.id.tr_circle)
339 |       showChooseAnimDialog();
340 |     else if(v.getId() == R.id.bl_circle)
341 |       mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.END_WALK, currentCharacter), false);
342 |       // fireAnimation(getAnimationsIndex().get(Animations.END_WALK), false);
343 |   }
344 | 
345 |   private void showChooseAnimDialog() {
346 |     final CharSequence[] items = {"Flip Red-Blue","Toggle OpenCV","Disable Tracking", "Recalibrate", "Shake Hands", "Turn Right-Left", "Wave hand", "Start walk", "Walk", "End walk", "Natural pose"};
347 | 
348 |     AlertDialog.Builder builder = new AlertDialog.Builder(this);
349 |     builder.setTitle("Pick an action");
350 |     builder.setItems(items, new DialogInterface.OnClickListener() {
351 |       public void onClick(DialogInterface dialog, int item) {
352 |         Toast.makeText(getApplicationContext(), items[item], Toast.LENGTH_SHORT).show();
353 |         switch (item) { // indices must match the items array above -- see the sketch after this listener
354 |         case 0:
355 |           flipRedBlue();
356 |           break;
357 |         case 1:
358 |           toggleOpenCV();
359 |           break;
360 |         case 2:
361 |           cview.disableTracking();
362 |           break;
363 |         case 3:
364 |           cview.recalibrate();
365 |           break;
366 |         case 4:
367 |           mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.SHAKE_HAND, currentCharacter), false);
368 |           break;
369 |         case 5:
370 |           toggleLooking();
371 |           break;
372 |         case 6:
373 |           mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.WAVE, currentCharacter), false);
374 |           break;
375 |         case 7:
376 |           mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.START_WALK, currentCharacter), false);
377 |           break;
378 |         case 8:
379 |           mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.WALK, currentCharacter), false);
380 |           break;
381 |         case 9:
382 |           mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.END_WALK, currentCharacter), false);
383 |           break;
384 |         case 10:
385 |           mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.NATURAL, currentCharacter), false);
386 |           break;
387 |         default:
388 |           break;
389 |         }
390 |       }
391 |     });
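      // Editorial sketch: the listener above switches on raw list indices, which must be kept
      // in lock-step with the items array by hand. One safer variant -- DialogAction is a
      // hypothetical enum, not part of this project -- could replace the numeric cases like so:
      //
      //   enum DialogAction { FLIP_RED_BLUE, TOGGLE_OPENCV, DISABLE_TRACKING, RECALIBRATE,
      //     SHAKE_HANDS, TURN_RIGHT_LEFT, WAVE_HAND, START_WALK, WALK, END_WALK, NATURAL_POSE }
      //
      //   // inside onClick(DialogInterface, int item):
      //   switch (DialogAction.values()[item]) {
      //     case FLIP_RED_BLUE: flipRedBlue(); break;
      //     case TOGGLE_OPENCV: toggleOpenCV(); break;
      //     // ...one case per constant, mirroring the cases above...
      //   }
      //
      // Deriving the dialog labels from the same enum would then keep the list and the actions
      // in sync by construction.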
392 |     AlertDialog alert = builder.create();
393 |     alert.show();
394 |   }
395 | 
396 |   @Override
397 |   public void onStartTrackingTouch(SeekBar seekBar) {}
398 | 
399 |   @Override
400 |   public void onStopTrackingTouch(SeekBar seekBar) {
401 |     if(seekBar.getTag().equals("hueSeek")) {
402 |       GradientDrawable sd = (GradientDrawable)getResources().getDrawable((mRedOrBlue) ? R.drawable.red_markercircle : R.drawable.blue_markercircle);
403 |       sd.setColor(Color.HSVToColor(new float[] {(float)seekBar.getProgress(), 1.0f, 1.0f}));
404 | 
405 |       findViewById(R.id.bl_circle).postInvalidate();
406 |       findViewById(R.id.tr_circle).postInvalidate();
407 |     }
408 |   }
409 | 
410 |   /* (non-Javadoc)
411 |    * @see edu.mit.media.fluid.royshil.headfollower.ICharacterStateHandler#onCharacterStateChanged(float[])
412 |    */
413 |   @Override
414 |   public void onCharacterStateChanged(final float[] state) {
415 |     // 11 floats:
416 |     // state[0]  = self point1 X
417 |     // state[1]  = self point1 Y
418 |     // state[2]  = self point2 X
419 |     // state[3]  = self point2 Y
420 |     // state[4]  = other point1 X
421 |     // state[5]  = other point1 Y
422 |     // state[6]  = other point2 X
423 |     // state[7]  = other point2 Y
424 |     // state[8]  = wave timer
425 |     // state[9]  = is tracking
426 |     // state[10] = self size
427 | 
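    // Editorial sketch: indexing the raw float[] by hand is easy to get wrong. Assuming the
    // layout documented above is authoritative, a small wrapper -- CharacterState is a
    // hypothetical helper, not part of this project -- would make the handler self-describing:
    //
    //   final class CharacterState {
    //     final float selfX1, selfY1, selfX2, selfY2;     // own marker pair
    //     final float otherX1, otherY1, otherX2, otherY2; // other character's marker pair
    //     final float waveTimer;                          // units unspecified in the source
    //     final boolean tracking;                         // state[9] != 0
    //     final float selfSize;
    //     CharacterState(float[] s) {
    //       selfX1 = s[0]; selfY1 = s[1]; selfX2 = s[2]; selfY2 = s[3];
    //       otherX1 = s[4]; otherY1 = s[5]; otherX2 = s[6]; otherY2 = s[7];
    //       waveTimer = s[8]; tracking = s[9] != 0f; selfSize = s[10];
    //     }
    //     // the tracker appears to report positive coordinates only when a marker was found
    //     boolean otherVisible() { return otherX1 > 0f && otherY1 > 0f && otherX2 > 0f && otherY2 > 0f; }
    //     float selfMidX()  { return (selfX1 + selfX2) / 2f; }
    //     float otherMidX() { return (otherX1 + otherX2) / 2f; }
    //   }
    //
    // The visibility test and midpoint math below would then read, e.g.,
    // cs.otherVisible() and cs.selfMidX() < cs.otherMidX().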
428 |     mcv.post(new Runnable() {
429 |       @Override
430 |       public void run() {
431 |         if(state[4] > 0.0f && state[5] > 0.0f && state[6] > 0.0f && state[7] > 0.0f) { // all four coordinates of the other character's markers are valid
432 |           // other character recognized
433 |           Log.i(TAG, "other character in sight");
434 | 
435 |           mcv.setRotationAndScale(0.0f, state[10]);
436 | 
437 |           float midx_self = (state[0]+state[2])/2;
438 |           float midx_other = (state[4]+state[6])/2;
439 |           if(midx_self < midx_other) { // other character to the left!
440 |             if(mLookingToTheRight) {
441 |               toggleLooking();
442 |               mLookingToTheRight = false;
443 |               // mcv.setmLookingRight(false);
444 |             }
445 |           } else { // other character to the right!
446 |             if(!mLookingToTheRight) {
447 |               toggleLooking();
448 |               mLookingToTheRight = true;
449 |               // mcv.setmLookingRight(true);
450 |             }
451 |           }
452 | 
453 |           float distance = Math.abs(midx_other - midx_self);
454 |           Log.i(TAG, "distance: " + distance);
455 |           if(distance < 100) { // hysteresis: the counters ramp up while the characters stay near each other, and decay otherwise
456 |             farInteractionCounter = Math.min(farInteractionCounter + 1, 10);
457 |             nonInteractionCount = Math.max(0, nonInteractionCount - 1);
458 |             if(distance < 25) {
459 |               closeInteractionCounter = Math.min(closeInteractionCounter + 1, 10);
460 |             } else {
461 |               closeInteractionCounter = Math.max(closeInteractionCounter - 1, 0);
462 |             }
463 |           } else {
464 |             farInteractionCounter = Math.max(farInteractionCounter - 1, 0);
465 |             closeInteractionCounter = Math.max(closeInteractionCounter - 1, 0);
466 |           }
467 | 
468 |           if(farInteractionCounter >= 10 && closeInteractionCounter < 5) {
469 |             // fire long-distance animation: hand wave
470 |             mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.WAVE, currentCharacter), false);
471 |           } else if(farInteractionCounter >= 10 && closeInteractionCounter >= 10) {
472 |             // both counters saturated: the characters are close enough to shake hands
473 |             mcv.fireAnimation(MyAnimations.getAnimation(MyAnimations.Animations.SHAKE_HAND, currentCharacter), false);
474 |           }
475 |         } else {
476 |           farInteractionCounter = Math.max(farInteractionCounter - 1, 0);
477 |           closeInteractionCounter = Math.max(closeInteractionCounter - 1, 0);
478 |           nonInteractionCount = Math.min(nonInteractionCount + 1, 30);
479 |         }
480 |         InteractionBar far = (InteractionBar) findViewById(R.id.interactionbar1);
481 |         far.setValue(farInteractionCounter);
482 |         InteractionBar close = (InteractionBar) findViewById(R.id.interactionbar2);
483 |         close.setValue(closeInteractionCounter);
484 |         if(nonInteractionCount >= 30) {
485 |           Toast t = Toast.makeText(getApplicationContext(), "Put us together if you want us to play!", Toast.LENGTH_LONG);
486 |           t.setGravity(Gravity.BOTTOM, 0, 0);
487 |           t.show();
488 |           nonInteractionCount = 0;
489 |         }
490 |       }
491 |     });
492 |   }
493 | 
494 |   @Override
495 |   public void showMarker() {
496 |     final View extra = findViewById(R.id.extra_marker);
497 |     extra.post(new Runnable() {
498 |       @Override
499 |       public void run() {
500 |         extra.setVisibility(View.VISIBLE);
501 |       }
502 |     });
503 |   }
504 | 
505 |   @Override
506 |   public void removeMarker() {
507 |     final View extra = findViewById(R.id.extra_marker);
508 |     extra.post(new Runnable() {
509 |       @Override
510 |       public void run() {
511 |         extra.setVisibility(View.INVISIBLE);
512 |       }
513 |     });
514 |   }
515 | 
516 |   @Override
517 |   public void showCharacter() {
518 |     final View mycanvas = findViewById(R.id.mycanvas);
519 |     mycanvas.post(new Runnable() {
520 |       @Override
521 |       public void run() {
522 |         mycanvas.setVisibility(View.VISIBLE);
523 |         findViewById(R.id.calibration_text_background).setVisibility(View.INVISIBLE);
524 |         RelativeLayout rl = (RelativeLayout)findViewById(R.id.charactercenterview);
525 |         rl.bringChildToFront(mycanvas);
526 |       }
527 |     });
528 |   }
529 | 
530 |   @Override
531 |   public void showCalibrationMessage() {
532 |     final RelativeLayout rl = (RelativeLayout)findViewById(R.id.charactercenterview);
533 |     rl.post(new Runnable() {
534 |       @Override
535 |       public void run() {
536 |         View calib_text_view = findViewById(R.id.calibration_text_background);
537 |         calib_text_view.setVisibility(View.VISIBLE);
538 |         rl.bringChildToFront(calib_text_view);
539 |       }
540 |     });
541 |   }
542 | 
543 |   @Override
544 |   public void onCalibrationStateChanged(final int[] state) {
      final InteractionBar far = (InteractionBar)findViewById(R.id.interactionbar1);
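      // Note: as in the callbacks above, the view updates below are wrapped in
      // View.post(Runnable), which suggests onCalibrationStateChanged is delivered off the
      // UI thread (presumably by the tracker); post() marshals the work back onto it.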
      far.post(new Runnable() {
546 |       @Override
547 |       public void run() {
548 |         far.setMax(10.0f);
549 |         far.setValue((float)(state[1]));
550 |         InteractionBar close = (InteractionBar) findViewById(R.id.interactionbar2);
551 |         close.setMax(2.0f);
552 |         close.setValue((float)(state[3]));
553 |       }
554 |     });
555 |   }
556 | 
557 |   // @Override
558 |   // public void surfaceChanged(SurfaceHolder holder, int format, int width,
559 |   //     int height) {
560 |   //
561 |   // }
562 |   //
563 |   // @Override
564 |   // public void surfaceCreated(SurfaceHolder holder) {
565 |   //   try {
566 |   //     AssetFileDescriptor openFd = getAssets().openFd("girlturn.gif");
567 |   //     mMediaPlayer.setDataSource(openFd.getFileDescriptor());
568 |   //     mMediaPlayer.prepare();
569 |   //   } catch (Exception e) {
570 |   //     e.printStackTrace();
571 |   //     (new AlertDialog.Builder(this)).setTitle("Exception").setMessage(e.getClass().getName() + ":" + e.getLocalizedMessage()).create().show();
572 |   //   }
573 |   // }
574 |   //
575 |   // @Override
576 |   // public void surfaceDestroyed(SurfaceHolder holder) {
577 |   //
578 |   // }
579 |   //
580 |   // @Override
581 |   // public void onCompletion(MediaPlayer mp) {
582 |   //
583 |   // }
584 |   //
585 |   // private void startVideoPlayback() {
586 |   //   Log.v(LOG_TAG, "startVideoPlayback");
587 |   //   holder.setFixedSize(200, 300);
588 |   //   mMediaPlayer.start();
589 |   // }
590 |   //
591 |   // public void onPrepared(MediaPlayer mediaplayer) {
592 |   //   Log.d(LOG_TAG, "onPrepared called");
593 |   //   mIsVideoReadyToBePlayed = true;
594 |   //   if (mIsVideoReadyToBePlayed/* && mIsVideoSizeKnown*/) {
595 |   //     startVideoPlayback();
596 |   //   }
597 |   // }
598 | }
--------------------------------------------------------------------------------