├── .gitignore ├── README.md ├── app ├── app.iml ├── build.gradle ├── proguard-rules.pro └── src │ ├── androidTest │ └── java │ │ └── android │ │ └── imagerecognizer │ │ └── app │ │ └── ApplicationTest.java │ └── main │ ├── AndroidManifest.xml │ ├── java │ ├── neural │ │ └── imagerecognizer │ │ │ └── app │ │ │ ├── RecognitionApp.java │ │ │ ├── nn │ │ │ ├── NNManager.java │ │ │ └── TensorMaker.java │ │ │ ├── ui │ │ │ ├── activities │ │ │ │ ├── BaseActivity.java │ │ │ │ ├── MainActivity.java │ │ │ │ └── SettingsActivity.java │ │ │ └── views │ │ │ │ ├── PaintView.java │ │ │ │ └── WhatisButton.java │ │ │ └── util │ │ │ ├── AppUncaughtExceptionHandler.java │ │ │ ├── ThreadManager.java │ │ │ ├── ToastImageDescription.java │ │ │ └── Tool.java │ └── org │ │ └── dmlc │ │ └── mxnet │ │ ├── MxnetException.java │ │ └── Predictor.java │ ├── jniLibs │ └── armeabi │ │ └── libmxnet_predict.so │ └── res │ ├── drawable-hdpi │ ├── ic_eraser_variant.png │ └── ic_lead_pencil.png │ ├── drawable-ldpi │ ├── ic_eraser_variant.png │ └── ic_lead_pencil.png │ ├── drawable-mdpi │ ├── ic_eraser_variant.png │ └── ic_lead_pencil.png │ ├── drawable-xhdpi │ ├── ic_eraser_variant.png │ └── ic_lead_pencil.png │ ├── drawable-xxhdpi │ ├── ic_eraser_variant.png │ └── ic_lead_pencil.png │ ├── drawable-xxxhdpi │ ├── ic_eraser_variant.png │ └── ic_lead_pencil.png │ ├── drawable │ ├── gradient_black.xml │ └── toast_backround.xml │ ├── layout │ ├── activity_main.xml │ ├── activity_settings.xml │ └── toast_image_description.xml │ ├── menu │ └── menu_main.xml │ ├── mipmap-hdpi │ └── ic_launcher.png │ ├── mipmap-mdpi │ └── ic_launcher.png │ ├── mipmap-xhdpi │ └── ic_launcher.png │ ├── mipmap-xxhdpi │ └── ic_launcher.png │ ├── raw │ ├── mean.json │ ├── params │ ├── symbol.json │ └── synset.txt │ ├── values-w820dp │ └── dimens.xml │ └── values │ ├── colors.xml │ ├── dimens.xml │ ├── strings.xml │ └── styles.xml ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── images └── Screenshot1.png ├── local.properties └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .gitignore 3 | .idea/ 4 | build/ 5 | .gradle/ 6 | *.iml 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ImageRecognizer-Android 2 | Image classification using neural networks (inception-bn) and [**MxNet**](https://github.com/dmlc/mxnet) (Python/C++ neural net library), implemented for Android. 
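A condensed sketch of the recognition flow, pieced together from the existing `MainActivity` and `NNManager` classes in this repo (no new API — just how the pieces are wired):

```java
// Grab the bitmap to classify: a drawing from PaintView, or a photo from camera/gallery.
Bitmap image = paintView.getPaintedBitmap();

// NNManager loads the inception-bn model from res/raw (symbol.json, params, synset.txt, mean.json)
// and runs the MxNet predictor on a background thread via ThreadManager, posting the result
// back to the main thread.
NNManager.shared().identifyImage(image, new NNManager.Callback() {
    @Override
    public void onResult(@NonNull String description) {
        // "description" is the human-readable label taken from res/raw/synset.txt
        ToastImageDescription.show(MainActivity.this, description);
    }
});
```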
3 | # 4 | nn/[*NNManager.java*](https://github.com/dneprDroid/ImageRecognizer/blob/master/app/src/main/java/neural/imagerecognizer/app/nn/NNManager.java) - the class that works with **MxNet** (loads the model and runs prediction) 5 | 6 | nn/[TensorMaker.java](https://github.com/dneprDroid/ImageRecognizer-Android/blob/master/app/src/main/java/neural/imagerecognizer/app/nn/TensorMaker.java) - tensor converter (turns a Bitmap into the network's input tensor and back) 7 | 8 | Pre-trained model: 9 | 10 | *res/raw/params* - serialized network parameters (weights, convolution kernels) 11 | 12 | *res/raw/symbol.json* - structure of the network 13 | 14 | *res/raw/synset.txt* - label dictionary for the network, mapping each output index to a human-readable word 15 | 16 | # 17 | 18 | 19 | # NDK library 20 | Build **libmxnet_predict.so** from the official MxNet sources - https://github.com/dmlc/mxnet/tree/master/amalgamation 21 | 22 | # iOS 23 | iOS version - https://github.com/dneprDroid/ImageRecognizer-iOS 24 | 25 | # Links 26 | * https://github.com/dmlc/mxnet - MxNet library 27 | * https://culurciello.github.io/tech/2016/06/04/nets.html - architectures of neural nets, including the inception-bn architecture 28 | * https://github.com/Trangle/mxnet-inception-v4 - inception network trainer 29 | 30 | -------------------------------------------------------------------------------- /app/app.iml: -------------------------------------------------------------------------------- [IntelliJ IDEA module configuration - XML content omitted] -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | repositories { 3 | jcenter() 4 | } 5 | dependencies { 6 | classpath 'com.android.tools.build:gradle:1.2.3' 7 | } 8 | } 9 | apply plugin: 'com.android.application' 10 | 11 | repositories { 12 | jcenter() 13 | } 14 | 15 | android { 16 | compileSdkVersion 23 17 | buildToolsVersion "23.0.2" 18 | 19 | defaultConfig { 20 | applicationId "neural.imagerecognizer.app" 21 | minSdkVersion 14 22 | targetSdkVersion 23 23 | versionCode 1 24 | versionName "1.0" 25 | } 26 | 27 | compileOptions { 28 | sourceCompatibility JavaVersion.VERSION_1_7 29 | targetCompatibility JavaVersion.VERSION_1_7 30 | } 31 | buildTypes { 32 | release { 33 | minifyEnabled false 34 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 35 | } 36 | } 37 | } 38 | 39 | dependencies { 40 | compile fileTree(dir: 'libs', include: ['*.jar']) 41 | compile 'com.android.support:appcompat-v7:23.0.0' 42 | 43 | compile 'com.jakewharton:butterknife:7.0.1' 44 | 45 | //Camera 46 | compile 'com.github.boxme:squarecamera:1.1.0' 47 | } 48 | -------------------------------------------------------------------------------- /app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # By default, the flags in this file are appended to flags specified 3 | # in /Users/useruser/Library/Android/sdk/tools/proguard/proguard-android.txt 4 | # You can edit the include path and order by changing the proguardFiles 5 | # directive in build.gradle. 
6 | # 7 | # For more details, see 8 | # http://developer.android.com/guide/developing/tools/proguard.html 9 | 10 | # Add any project specific keep options here: 11 | 12 | # If your project uses WebView with JS, uncomment the following 13 | # and specify the fully qualified class name to the JavaScript interface 14 | # class: 15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 16 | # public *; 17 | #} 18 | -------------------------------------------------------------------------------- /app/src/androidTest/java/android/imagerecognizer/app/ApplicationTest.java: -------------------------------------------------------------------------------- 1 | package android.imagerecognizer.app; 2 | 3 | import android.app.Application; 4 | import android.test.ApplicationTestCase; 5 | 6 | /** 7 | * Testing Fundamentals 8 | */ 9 | public class ApplicationTest extends ApplicationTestCase { 10 | public ApplicationTest() { 11 | super(Application.class); 12 | } 13 | } -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 13 | 14 | 15 | 16 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/RecognitionApp.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app; 2 | 3 | import android.app.Application; 4 | import neural.imagerecognizer.app.nn.NNManager; 5 | import neural.imagerecognizer.app.util.AppUncaughtExceptionHandler; 6 | import neural.imagerecognizer.app.util.ThreadManager; 7 | import neural.imagerecognizer.app.util.Tool; 8 | import org.dmlc.mxnet.Predictor; 9 | import org.json.JSONException; 10 | import org.json.JSONObject; 11 | 12 | import java.util.HashMap; 13 | import java.util.List; 14 | import java.util.Map; 15 | 16 | public class RecognitionApp extends Application { 17 | public static ThreadManager tm; 18 | private static RecognitionApp instance; 19 | 20 | 21 | @Override 22 | public void onCreate() { 23 | super.onCreate(); 24 | instance = this; 25 | 26 | tm = ThreadManager.getInstance(); 27 | Thread.setDefaultUncaughtExceptionHandler(new AppUncaughtExceptionHandler(this)); 28 | 29 | NNManager.init(); 30 | } 31 | 32 | public static RecognitionApp getInstance() { 33 | return instance; 34 | } 35 | 36 | @Override 37 | public void onTerminate() { 38 | super.onTerminate(); 39 | tm.end(); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/nn/NNManager.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.nn; 2 | 3 | import android.graphics.Bitmap; 4 | 5 | import android.support.annotation.MainThread; 6 | import android.support.annotation.NonNull; 7 | import android.support.annotation.Nullable; 8 | import neural.imagerecognizer.app.R; 9 | import neural.imagerecognizer.app.RecognitionApp; 10 | import neural.imagerecognizer.app.util.ThreadManager; 11 | import neural.imagerecognizer.app.util.Tool; 12 | import org.dmlc.mxnet.Predictor; 13 | import org.json.JSONException; 14 | import org.json.JSONObject; 15 | 16 | import java.nio.ByteBuffer; 17 | import java.util.*; 18 | 19 | public final class NNManager { 20 | 21 | 22 | private List dict; 23 | private Map 
mean; 24 | 25 | private Predictor predictor; 26 | private static NNManager shared; 27 | 28 | private NNManager() { 29 | } 30 | 31 | public static synchronized NNManager shared() { 32 | if (shared == null) { 33 | shared = new NNManager(); 34 | shared.initMxNet(); 35 | } 36 | return shared; 37 | } 38 | 39 | private void initMxNet() { 40 | final byte[] symbol = Tool.readRawFile(R.raw.symbol); 41 | final byte[] params = Tool.readRawFile(R.raw.params); 42 | 43 | final Predictor.Device device = new Predictor.Device(Predictor.Device.Type.CPU, 0); 44 | //3 channel image on input 45 | final int[] shape = {1, 3, 224, 224}; 46 | final String key = "data"; 47 | final Predictor.InputNode node = new Predictor.InputNode(key, shape); 48 | 49 | predictor = new Predictor(symbol, params, device, new Predictor.InputNode[]{node}); 50 | dict = Tool.readRawTextFileAsList(R.raw.synset); 51 | try { 52 | final String meanStr = Tool.readRawTextFile(R.raw.mean); 53 | final JSONObject meanJson = new JSONObject(meanStr); 54 | mean = new HashMap<>(); 55 | mean.put("b", (float) meanJson.optDouble("b")); 56 | mean.put("g", (float) meanJson.optDouble("g")); 57 | mean.put("r", (float) meanJson.optDouble("r")); 58 | } catch (JSONException e) { 59 | e.printStackTrace(); 60 | } 61 | } 62 | 63 | public void identifyImage(final Bitmap bitmap, final Callback callback) { 64 | RecognitionApp.tm.execute(new ThreadManager.Executor() { 65 | @Nullable 66 | @Override 67 | public String onExecute() throws Exception { 68 | float[] imageTensor = TensorMaker.convertBitmapToTensor(bitmap, mean); 69 | predictor.forward("data", imageTensor); 70 | final float[] result = predictor.getOutput(0); 71 | 72 | int index = 0; 73 | for (int i = 0; i < result.length; i++) { 74 | if (result[index] < result[i]) index = i; 75 | } 76 | //Arrays.sort(result); 77 | String tag = getName(index); 78 | Tool.log("recognition competed: %s", tag); 79 | String[] arr = tag.split(" ", 2); 80 | return arr[1]; 81 | } 82 | 83 | @Override 84 | public void onCallback(@NonNull String data) { 85 | callback.onResult(data); 86 | } 87 | 88 | @Override 89 | public void onError(Exception e) { 90 | Tool.log("error of img recogn. 
: %s", e); 91 | Tool.showToast(R.string.toast_recognition_error); 92 | } 93 | }); 94 | } 95 | 96 | public static void init() { 97 | shared(); 98 | } 99 | 100 | 101 | private String getName(int i) { 102 | if (i >= dict.size()) { 103 | return RecognitionApp.getInstance().getString(R.string.text_image_not_recognized); 104 | } 105 | return dict.get(i); 106 | } 107 | 108 | public interface Callback { 109 | @MainThread 110 | void onResult(@NonNull String description); 111 | } 112 | 113 | } 114 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/nn/TensorMaker.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.nn; 2 | 3 | 4 | import android.graphics.Bitmap; 5 | import neural.imagerecognizer.app.util.Tool; 6 | 7 | import java.nio.ByteBuffer; 8 | import java.nio.IntBuffer; 9 | import java.util.Map; 10 | 11 | public final class TensorMaker { 12 | private static final int SHORTER_SIDE = 256; 13 | private static final int DESIRED_SIDE = 224; // default image side for input in inception-bn network 14 | 15 | private TensorMaker() { 16 | } 17 | 18 | // todo: implement this via RenderScript 19 | public static float[] convertBitmapToTensor(Bitmap bitmap, Map mean) { 20 | Bitmap processedBitmap = processBitmap(bitmap); 21 | ByteBuffer byteBuffer = ByteBuffer.allocate(processedBitmap.getByteCount()); 22 | processedBitmap.copyPixelsToBuffer(byteBuffer); 23 | byte[] bytes = byteBuffer.array(); 24 | float[] colors = new float[bytes.length / 4 * 3]; 25 | 26 | float mean_b = mean.get("b"); 27 | float mean_g = mean.get("g"); 28 | float mean_r = mean.get("r"); 29 | for (int i = 0; i < bytes.length; i += 4) { 30 | int j = i / 4; 31 | colors[0 * DESIRED_SIDE * DESIRED_SIDE + j] = (bytes[i + 0] & 0xFF) - mean_r; // red 32 | colors[1 * DESIRED_SIDE * DESIRED_SIDE + j] = (bytes[i + 1] & 0xFF) - mean_g; // green 33 | colors[2 * DESIRED_SIDE * DESIRED_SIDE + j] = (bytes[i + 2] & 0xFF) - mean_b; // blue 34 | } 35 | return colors; 36 | } 37 | 38 | public static Bitmap convertTensorToBitmap(float[] imageTensor, Map mean) { 39 | float mean_b = mean.get("b"); 40 | float mean_g = mean.get("g"); 41 | float mean_r = mean.get("r"); 42 | byte[] imageBytes = new byte[imageTensor.length * 4 / 3]; 43 | for (int i = 0; i < imageBytes.length; i += 4) { 44 | int j = i / 4; 45 | imageBytes[i + 0] = (byte) (imageTensor[0 * DESIRED_SIDE * DESIRED_SIDE + j] + mean_r); 46 | imageBytes[i + 1] = (byte) (imageTensor[1 * DESIRED_SIDE * DESIRED_SIDE + j] + mean_g); 47 | imageBytes[i + 2] = (byte) (imageTensor[2 * DESIRED_SIDE * DESIRED_SIDE + j] + mean_b); 48 | } 49 | Bitmap bitmap = Bitmap.createBitmap(DESIRED_SIDE, DESIRED_SIDE, Bitmap.Config.ARGB_8888); 50 | bitmap.copyPixelsFromBuffer(ByteBuffer.wrap(imageBytes)); 51 | //Tool.saveBitmap(bitmap); 52 | return bitmap; 53 | } 54 | 55 | private static Bitmap processBitmap(final Bitmap origin) { 56 | final int originWidth = origin.getWidth(); 57 | final int originHeight = origin.getHeight(); 58 | int height = SHORTER_SIDE; 59 | int width = SHORTER_SIDE; 60 | if (originWidth < originHeight) { 61 | height = (int) ((float) originHeight / originWidth * width); 62 | } else { 63 | width = (int) ((float) originWidth / originHeight * height); 64 | } 65 | final Bitmap scaled = Bitmap.createScaledBitmap(origin, width, height, false); 66 | int y = (height - DESIRED_SIDE) / 2; 67 | int x = (width - DESIRED_SIDE) / 2; 68 | return Bitmap.createBitmap(scaled, x, y, 
DESIRED_SIDE, DESIRED_SIDE); 69 | 70 | } 71 | 72 | } 73 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/ui/activities/BaseActivity.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.ui.activities; 2 | 3 | import android.content.Intent; 4 | import android.content.pm.PackageManager; 5 | import neural.imagerecognizer.app.util.Tool; 6 | import android.support.annotation.LayoutRes; 7 | import android.support.annotation.NonNull; 8 | import android.support.v4.app.ActivityCompat; 9 | import android.support.v4.content.ContextCompat; 10 | import android.support.v7.app.AppCompatActivity; 11 | import android.view.View; 12 | import butterknife.ButterKnife; 13 | 14 | public abstract class BaseActivity extends AppCompatActivity { 15 | 16 | private static final int REQUEST_PERMISSION = 100; 17 | 18 | private static final int REQUEST_CODE = 101; 19 | 20 | private CallbackResult callback; 21 | private PermissionCallback permissionCallback; 22 | 23 | @Override 24 | public void setContentView(@LayoutRes int layoutResID) { 25 | super.setContentView(layoutResID); 26 | bind(); 27 | } 28 | 29 | @Override 30 | public void setContentView(View view) { 31 | super.setContentView(view); 32 | bind(); 33 | } 34 | 35 | private void bind() { 36 | ButterKnife.bind(this); 37 | } 38 | 39 | @Override 40 | protected void onDestroy() { 41 | super.onDestroy(); 42 | ButterKnife.unbind(this); 43 | } 44 | 45 | public void startActivityForResult(Intent intent, CallbackResult callback) { 46 | this.callback = callback; 47 | super.startActivityForResult(intent, REQUEST_CODE); 48 | } 49 | 50 | public void requestPermission(PermissionCallback permissionCallback) { 51 | final String permission = permissionCallback.getPermissionName();// Manifest.permission.CAMERA; 52 | 53 | if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) { 54 | 55 | if (ActivityCompat.shouldShowRequestPermissionRationale(this, permission)) { 56 | permissionCallback.onFail(); 57 | } else { 58 | // Handle the result in Activity#onRequestPermissionResult(int, String[], int[]) 59 | this.permissionCallback = permissionCallback; 60 | ActivityCompat.requestPermissions(this, new String[]{permission}, REQUEST_PERMISSION); 61 | } 62 | } else { 63 | permissionCallback.onPermissionGranted(); 64 | } 65 | } 66 | 67 | @Override 68 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { 69 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 70 | if (requestCode != REQUEST_PERMISSION || permissionCallback == null) 71 | return; 72 | for (int i = 0; i < permissions.length; i++) { 73 | String permission = permissions[i]; 74 | int grantResult = grantResults[i]; 75 | boolean granted = grantResult == PackageManager.PERMISSION_GRANTED; 76 | 77 | if (permission.equals(permissionCallback.getPermissionName())) { 78 | if (granted) 79 | permissionCallback.onPermissionGranted(); 80 | else 81 | permissionCallback.onFail(); 82 | break; 83 | } 84 | } 85 | permissionCallback = null; 86 | } 87 | 88 | @Override 89 | protected void onActivityResult(int requestCode, int resultCode, Intent data) { 90 | super.onActivityResult(requestCode, resultCode, data); 91 | 92 | boolean resultOk = resultCode == RESULT_OK && requestCode == REQUEST_CODE && callback != null && data != null; 93 | if (resultOk) 94 | callback.onResult(data); 95 
| else 96 | Tool.log("data nullable is %s", data == null); 97 | callback = null; 98 | } 99 | 100 | public interface PermissionCallback { 101 | void onPermissionGranted(); 102 | 103 | void onFail(); 104 | 105 | @NonNull 106 | String getPermissionName(); 107 | } 108 | 109 | public interface CallbackResult { 110 | void onResult(@NonNull Intent data); 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/ui/activities/MainActivity.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.ui.activities; 2 | 3 | import android.Manifest; 4 | import android.content.Intent; 5 | import android.graphics.Bitmap; 6 | import android.graphics.BitmapFactory; 7 | import android.support.annotation.Nullable; 8 | import neural.imagerecognizer.app.R; 9 | import neural.imagerecognizer.app.nn.NNManager; 10 | import neural.imagerecognizer.app.ui.views.PaintView; 11 | import neural.imagerecognizer.app.ui.views.WhatisButton; 12 | import neural.imagerecognizer.app.util.ToastImageDescription; 13 | import neural.imagerecognizer.app.util.Tool; 14 | import android.net.Uri; 15 | import android.support.annotation.NonNull; 16 | import android.os.Bundle; 17 | import android.view.Menu; 18 | import android.view.MenuItem; 19 | import butterknife.Bind; 20 | import butterknife.OnClick; 21 | import com.desmond.squarecamera.CameraActivity; 22 | 23 | import java.io.FileNotFoundException; 24 | import java.io.InputStream; 25 | 26 | public class MainActivity extends BaseActivity { 27 | 28 | @Bind(R.id.btnWhatis) 29 | WhatisButton btnWhatis; 30 | 31 | @Bind(R.id.paintView) 32 | PaintView paintView; 33 | 34 | @Nullable 35 | private Bitmap recognBitmap; 36 | 37 | @Override 38 | protected void onCreate(Bundle savedInstanceState) { 39 | super.onCreate(savedInstanceState); 40 | setContentView(R.layout.activity_main); 41 | 42 | } 43 | 44 | @OnClick(R.id.btnWhatis) 45 | public void whatisClick() { 46 | 47 | if (paintView.isModePaint()) { 48 | recognBitmap = paintView.getPaintedBitmap(); 49 | } else if (paintView.isModePhoto()) 50 | if (recognBitmap == null) 51 | return; 52 | 53 | btnWhatis.startAnimation(); 54 | NNManager.shared().identifyImage(recognBitmap, new NNManager.Callback() { 55 | @Override 56 | public void onResult(@NonNull String description) { 57 | btnWhatis.endAnimation(); 58 | //set image description.... 
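// ThreadManager runs the recognition itself on a background thread and delivers this
// callback on the main thread (Callback.onResult is annotated @MainThread), so it is
// safe to update views and show the toast here.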
59 | ToastImageDescription.show(MainActivity.this, description); 60 | } 61 | }); 62 | 63 | } 64 | 65 | @OnClick(R.id.ivErse) 66 | public void clean() { 67 | 68 | if (paintView.isModePhoto()) 69 | paintView.setModePaint(); 70 | 71 | paintView.clearBitmap(); 72 | recognBitmap = null; 73 | Tool.showToast(this, R.string.toast_cleared); 74 | } 75 | 76 | @OnClick(R.id.ivPencil) 77 | public void enablePaintMode() { 78 | paintView.setModePaint(); 79 | } 80 | 81 | @OnClick(R.id.ivGallery) 82 | public void selectFromGallery() { 83 | Intent photoPickerIntent = new Intent(Intent.ACTION_PICK); 84 | photoPickerIntent.setType("image/*"); 85 | startActivityForResult(photoPickerIntent, new CallbackResult() { 86 | @Override 87 | public void onResult(@NonNull Intent data) { 88 | setImageFromIntent(data); 89 | } 90 | 91 | }); 92 | } 93 | 94 | @OnClick(R.id.ivCamera) 95 | public void selectFromCamera() { 96 | requestPermission(new PermissionCallback() { 97 | @Override 98 | public void onPermissionGranted() { 99 | Intent startCustomCameraIntent = new Intent(MainActivity.this, CameraActivity.class); 100 | startActivityForResult(startCustomCameraIntent, new CallbackResult() { 101 | @Override 102 | public void onResult(@NonNull Intent data) { 103 | setImageFromIntent(data); 104 | } 105 | }); 106 | } 107 | 108 | @Override 109 | public void onFail() { 110 | Tool.showToast(MainActivity.this, "Please give camera permission!"); 111 | } 112 | 113 | @NonNull 114 | @Override 115 | public String getPermissionName() { 116 | return Manifest.permission.CAMERA; 117 | } 118 | }); 119 | } 120 | 121 | private void setImageFromIntent(Intent data) { 122 | try { 123 | Uri imageUri = data.getData(); 124 | InputStream imageStream = getContentResolver().openInputStream(imageUri); 125 | 126 | Bitmap bitmap = BitmapFactory.decodeStream(imageStream); 127 | this.recognBitmap = bitmap; 128 | paintView.setPhoto(bitmap); 129 | } catch (FileNotFoundException e) { 130 | e.printStackTrace(); 131 | } 132 | } 133 | 134 | @Override 135 | public boolean onCreateOptionsMenu(Menu menu) { 136 | getMenuInflater().inflate(R.menu.menu_main, menu); 137 | return true; 138 | } 139 | 140 | @Override 141 | public boolean onOptionsItemSelected(MenuItem item) { 142 | int id = item.getItemId(); 143 | 144 | if (id == R.id.action_share) { 145 | Tool.shareText(this, Tool.generateGooglePlayLink()); 146 | return true; 147 | } 148 | 149 | return super.onOptionsItemSelected(item); 150 | } 151 | 152 | } 153 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/ui/activities/SettingsActivity.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.ui.activities; 2 | 3 | import android.os.Bundle; 4 | import android.support.annotation.Nullable; 5 | import neural.imagerecognizer.app.R; 6 | 7 | public class SettingsActivity extends BaseActivity { 8 | @Override 9 | protected void onCreate(@Nullable Bundle savedInstanceState) { 10 | super.onCreate(savedInstanceState); 11 | setContentView(R.layout.activity_settings); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/ui/views/PaintView.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.ui.views; 2 | 3 | import android.content.Context; 4 | import android.graphics.*; 5 | import android.support.annotation.NonNull; 6 | 
import android.support.annotation.Nullable; 7 | import android.util.AttributeSet; 8 | import android.view.MotionEvent; 9 | import android.widget.ImageView; 10 | import neural.imagerecognizer.app.util.Tool; 11 | 12 | public class PaintView extends ImageView { 13 | private Bitmap mBitmap; 14 | private Canvas mCanvas; 15 | private Path mPath; 16 | private Paint mBitmapPaint; 17 | private Paint mPaint; 18 | private Mode mode = Mode.PAINT; 19 | 20 | public PaintView(Context c) { 21 | super(c); 22 | init(); 23 | } 24 | 25 | public PaintView(Context context, @Nullable AttributeSet attrs) { 26 | super(context, attrs); 27 | init(); 28 | } 29 | 30 | private void init() { 31 | mPaint = new Paint(); 32 | mPaint.setAntiAlias(true); 33 | mPaint.setDither(true); 34 | mPaint.setColor(Color.BLUE); 35 | mPaint.setStyle(Paint.Style.STROKE); 36 | mPaint.setStrokeJoin(Paint.Join.ROUND); 37 | mPaint.setStrokeCap(Paint.Cap.ROUND); 38 | mPaint.setStrokeWidth(9); 39 | 40 | 41 | mPath = new Path(); 42 | mBitmapPaint = new Paint(Paint.DITHER_FLAG); 43 | } 44 | 45 | @Override 46 | protected void onSizeChanged(int w, int h, int oldw, int oldh) { 47 | super.onSizeChanged(w, h, oldw, oldh); 48 | 49 | recreateBitmap(w, h); 50 | } 51 | 52 | @Override 53 | protected void onDraw(@NonNull Canvas canvas) { 54 | super.onDraw(canvas); 55 | if (isModePhoto()) 56 | return; 57 | canvas.drawBitmap(mBitmap, 0, 0, mBitmapPaint); 58 | canvas.drawPath(mPath, mPaint); 59 | 60 | } 61 | 62 | private float mX, mY; 63 | private static final float TOUCH_TOLERANCE = 4; 64 | 65 | private void touch_start(float x, float y) { 66 | mPath.reset(); 67 | mPath.moveTo(x, y); 68 | mX = x; 69 | mY = y; 70 | } 71 | 72 | private void touch_move(float x, float y) { 73 | float dx = Math.abs(x - mX); 74 | float dy = Math.abs(y - mY); 75 | if (dx >= TOUCH_TOLERANCE || dy >= TOUCH_TOLERANCE) { 76 | mPath.quadTo(mX, mY, (x + mX) / 2, (y + mY) / 2); 77 | mX = x; 78 | mY = y; 79 | } 80 | } 81 | 82 | @Override 83 | public void setImageBitmap(Bitmap bm) { 84 | invalidate(); 85 | super.setImageBitmap(bm); 86 | } 87 | 88 | public void setPhoto(Bitmap bitmap) { 89 | setModePhoto(); 90 | setImageBitmap(bitmap); 91 | } 92 | 93 | 94 | public void setModePaint() { 95 | clearBitmap(); 96 | mode = Mode.PAINT; 97 | } 98 | 99 | public void setModePhoto() { 100 | this.mode = Mode.PHOTO; 101 | } 102 | 103 | private void touch_up() { 104 | mPath.lineTo(mX, mY); 105 | // commit the path to our offscreen 106 | mCanvas.drawPath(mPath, mPaint); 107 | // kill this so we don't double draw 108 | mPath.reset(); 109 | } 110 | 111 | @Override 112 | public boolean onTouchEvent(@NonNull MotionEvent event) { 113 | if (isModePhoto()) 114 | return true; 115 | float x = event.getX(); 116 | float y = event.getY(); 117 | 118 | switch (event.getAction()) { 119 | case MotionEvent.ACTION_DOWN: 120 | touch_start(x, y); 121 | invalidate(); 122 | break; 123 | case MotionEvent.ACTION_MOVE: 124 | touch_move(x, y); 125 | invalidate(); 126 | break; 127 | case MotionEvent.ACTION_UP: 128 | touch_up(); 129 | invalidate(); 130 | break; 131 | } 132 | return true; 133 | } 134 | 135 | public boolean isModePaint() { 136 | return mode == Mode.PAINT; 137 | } 138 | 139 | public boolean isModePhoto() { 140 | return mode == Mode.PHOTO; 141 | } 142 | 143 | public Mode getMode() { 144 | return mode; 145 | } 146 | 147 | public Bitmap getPaintedBitmap() { 148 | return mBitmap; 149 | } 150 | 151 | public void clearBitmap() { 152 | setImageBitmap(null); 153 | recreateBitmap(getWidth(), getHeight()); 154 | invalidate(); 155 
| Tool.log("btmap size: %s, %s", mBitmap.getWidth(), mBitmap.getHeight()); 156 | } 157 | 158 | private void recreateBitmap(int width, int height) { 159 | mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 160 | mCanvas = new Canvas(mBitmap); 161 | //mCanvas.drawARGB(255, 255, 255, 255); 162 | } 163 | 164 | public enum Mode { 165 | PAINT, PHOTO 166 | } 167 | } 168 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/ui/views/WhatisButton.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.ui.views; 2 | 3 | import android.animation.*; 4 | import android.content.Context; 5 | import android.graphics.Color; 6 | import android.util.AttributeSet; 7 | import android.view.View; 8 | import android.widget.Button; 9 | import neural.imagerecognizer.app.R; 10 | 11 | public class WhatisButton extends Button { 12 | private static final long ANIMATION_DURATION = 1000; 13 | private AnimatorSet animator; 14 | 15 | public WhatisButton(Context context) { 16 | super(context); 17 | init(); 18 | } 19 | 20 | 21 | public WhatisButton(Context context, AttributeSet attrs) { 22 | super(context, attrs); 23 | init(); 24 | } 25 | 26 | private void init() { 27 | 28 | setBackgroundColor(Color.BLUE); 29 | setTextColor(Color.WHITE); 30 | setText(R.string.label_whatis); 31 | 32 | ValueAnimator animatorBackground = ObjectAnimator.ofInt(this, "backgroundColor", Color.BLUE, Color.CYAN, Color.BLUE); 33 | 34 | animatorBackground.setEvaluator(new ArgbEvaluator()); 35 | animatorBackground.setDuration(ANIMATION_DURATION); 36 | animatorBackground.setRepeatCount(ValueAnimator.INFINITE); 37 | animatorBackground.setRepeatMode(ValueAnimator.REVERSE); 38 | 39 | this.animator = new AnimatorSet(); 40 | animator.playTogether(animatorBackground);//, scaleDownX, scaleDownY); 41 | 42 | } 43 | 44 | public void startAnimation() { 45 | setText(R.string.label_recognizing); 46 | animator.start(); 47 | setClickable(false); 48 | setFocusable(false); 49 | } 50 | 51 | public void endAnimation() { 52 | setText(R.string.label_whatis); 53 | animator.end(); 54 | setClickable(true); 55 | setFocusable(true); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/util/AppUncaughtExceptionHandler.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.util; 2 | 3 | import android.app.AlarmManager; 4 | import android.app.PendingIntent; 5 | import android.content.Context; 6 | import android.content.Intent; 7 | import neural.imagerecognizer.app.ui.activities.BaseActivity; 8 | import neural.imagerecognizer.app.ui.activities.MainActivity; 9 | 10 | public class AppUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { 11 | private Context context; 12 | 13 | public AppUncaughtExceptionHandler(Context context) { 14 | this.context = context; 15 | } 16 | 17 | @Override 18 | public void uncaughtException(Thread thread, Throwable ex) { 19 | Tool.log("Exception: " + ex); 20 | 21 | Intent intent = new Intent(context, MainActivity.class); 22 | intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); 23 | 24 | 25 | PendingIntent pendingIntent = PendingIntent.getActivity(context, 0, 26 | new Intent(intent), PendingIntent.FLAG_UPDATE_CURRENT); 27 | 28 | AlarmManager manager = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE); 29 | 
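// Schedule MainActivity to be relaunched almost immediately (15 ms from now) via AlarmManager,
// then terminate the crashed process below.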
manager.set(AlarmManager.RTC, System.currentTimeMillis() + 15, pendingIntent); 30 | System.exit(2); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/util/ThreadManager.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.util; 2 | 3 | import android.os.Handler; 4 | import android.os.Looper; 5 | import android.support.annotation.MainThread; 6 | import android.support.annotation.NonNull; 7 | import android.support.annotation.Nullable; 8 | 9 | import java.util.concurrent.*; 10 | 11 | public class ThreadManager { 12 | 13 | private static ThreadManager instance; 14 | private ExecutorService pool; 15 | 16 | private ThreadManager() { 17 | pool = Executors.newCachedThreadPool(); 18 | } 19 | 20 | public static synchronized ThreadManager getInstance() { 21 | if (instance == null) instance = new ThreadManager(); 22 | return instance; 23 | } 24 | 25 | public void execute(Runnable r) { 26 | pool.execute(r); 27 | } 28 | 29 | public void execute(final Executor executor) { 30 | pool.execute(new Runnable() { 31 | @Override 32 | public void run() { 33 | try { 34 | final T data = executor.onExecute(); 35 | if (data != null) 36 | runOnMainThread(new Runnable() { 37 | @Override 38 | public void run() { 39 | try { 40 | executor.onCallback(data); 41 | } catch (Exception e) { 42 | Tool.log("main thread error"); 43 | } 44 | } 45 | }); 46 | } catch (final Exception e) { 47 | Runnable onError = new Runnable() { 48 | @Override 49 | public void run() { 50 | e.printStackTrace(); 51 | executor.onError(e); 52 | } 53 | }; 54 | runOnMainThread(onError); 55 | } 56 | } 57 | }); 58 | } 59 | 60 | private void runOnMainThread(Runnable r) { 61 | new Handler(Looper.getMainLooper()).post(r); 62 | } 63 | 64 | public void end() { 65 | pool.shutdown(); 66 | try { 67 | if (!pool.awaitTermination(5, TimeUnit.SECONDS)) { 68 | pool.shutdownNow(); 69 | if (!pool.awaitTermination(5, TimeUnit.SECONDS)) 70 | System.err.println("Pool did not terminate"); 71 | } 72 | } catch (InterruptedException ie) { 73 | pool.shutdownNow(); 74 | Thread.currentThread().interrupt(); 75 | } 76 | instance = null; 77 | } 78 | 79 | public interface Executor { 80 | @Nullable 81 | T onExecute() throws Exception; 82 | 83 | @MainThread 84 | void onCallback(@NonNull T data); 85 | 86 | @MainThread 87 | void onError(Exception e); 88 | } 89 | } -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/util/ToastImageDescription.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.util; 2 | 3 | import android.content.Context; 4 | import android.view.Gravity; 5 | import android.view.LayoutInflater; 6 | import android.view.TextureView; 7 | import android.view.View; 8 | import android.widget.TextView; 9 | import android.widget.Toast; 10 | import neural.imagerecognizer.app.R; 11 | 12 | public class ToastImageDescription { 13 | public static void show(final Context context, final String message) { 14 | 15 | Tool.runOnMainThread(new Runnable() { 16 | @Override 17 | public void run() { 18 | Toast toast = new Toast(context); 19 | toast.setGravity(Gravity.TOP, 0, (int) (Tool.getToolbarHeight() * 1.5)); 20 | toast.setDuration(Toast.LENGTH_LONG); 21 | TextView tv = (TextView) LayoutInflater.from(context).inflate(R.layout.toast_image_description, null); 22 | 
tv.setText(message); 23 | toast.setView(tv); 24 | toast.show(); 25 | } 26 | }); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /app/src/main/java/neural/imagerecognizer/app/util/Tool.java: -------------------------------------------------------------------------------- 1 | package neural.imagerecognizer.app.util; 2 | 3 | import android.Manifest; 4 | import android.app.Activity; 5 | import android.content.Context; 6 | import android.content.Intent; 7 | import android.graphics.Bitmap; 8 | import android.os.Handler; 9 | import android.os.Looper; 10 | import android.support.annotation.*; 11 | import android.util.Log; 12 | import android.util.TypedValue; 13 | import android.widget.Toast; 14 | import neural.imagerecognizer.app.RecognitionApp; 15 | 16 | import java.io.*; 17 | import java.util.ArrayList; 18 | import java.util.List; 19 | import java.util.Random; 20 | 21 | public final class Tool { 22 | 23 | private Tool() { 24 | } 25 | 26 | 27 | public static void log(String s, Object... args) { 28 | log(String.format(s, args)); 29 | } 30 | 31 | public static void log(String s) { 32 | Log.v("ImageRecognizer", s); 33 | } 34 | 35 | public static void showToast(final Context context, final String message) { 36 | runOnMainThread(new Runnable() { 37 | @Override 38 | public void run() { 39 | Toast.makeText(context, message, Toast.LENGTH_SHORT).show(); 40 | } 41 | }); 42 | } 43 | 44 | public static void showToast(Context context, @StringRes int message) { 45 | showToast(context, context.getString(message)); 46 | } 47 | 48 | public static void showToast(@StringRes int message) { 49 | Context context = RecognitionApp.getInstance(); 50 | showToast(context, context.getString(message)); 51 | } 52 | 53 | public static void showToast(String message) { 54 | Context context = RecognitionApp.getInstance(); 55 | showToast(context, message); 56 | } 57 | 58 | public static void runOnMainThread(Runnable runnable) { 59 | new Handler(Looper.getMainLooper()).post(runnable); 60 | } 61 | 62 | public static byte[] readRawFile(@RawRes int resId) { 63 | ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); 64 | int size = 0; 65 | byte[] buffer = new byte[1024]; 66 | try { 67 | InputStream ins = RecognitionApp.getInstance().getApplicationContext().getResources().openRawResource(resId); 68 | while ((size = ins.read(buffer, 0, 1024)) >= 0) { 69 | outputStream.write(buffer, 0, size); 70 | } 71 | } catch (IOException e) { 72 | e.printStackTrace(); 73 | } 74 | return outputStream.toByteArray(); 75 | } 76 | 77 | 78 | public static String readRawTextFile(@RawRes int resId) { 79 | StringBuilder result = new StringBuilder(); 80 | InputStream inputStream = RecognitionApp.getInstance().getApplicationContext().getResources().openRawResource(resId); 81 | 82 | InputStreamReader inputreader = new InputStreamReader(inputStream); 83 | BufferedReader buffreader = new BufferedReader(inputreader); 84 | String line; 85 | 86 | try { 87 | while ((line = buffreader.readLine()) != null) { 88 | result.append(line); 89 | } 90 | } catch (IOException e) { 91 | e.printStackTrace(); 92 | } 93 | return result.toString(); 94 | } 95 | 96 | public static List readRawTextFileAsList(@RawRes int resId) { 97 | List result = new ArrayList<>(); 98 | InputStream inputStream = RecognitionApp.getInstance().getApplicationContext().getResources().openRawResource(resId); 99 | 100 | InputStreamReader inputreader = new InputStreamReader(inputStream); 101 | BufferedReader buffreader = new 
BufferedReader(inputreader); 102 | String line; 103 | 104 | try { 105 | while ((line = buffreader.readLine()) != null) { 106 | result.add(line); 107 | } 108 | } catch (IOException e) { 109 | e.printStackTrace(); 110 | } 111 | return result; 112 | } 113 | 114 | //Debug 115 | public static void saveBitmap(final Bitmap bitmap) { 116 | RecognitionApp.tm.execute(new Runnable() { 117 | @Override 118 | public void run() { 119 | File myDir = new File("/sdcard/saved_images"); 120 | myDir.mkdirs(); 121 | Random generator = new Random(); 122 | int n = 10000; 123 | n = generator.nextInt(n); 124 | String fname = String.format("Image-%s.jpg", n); 125 | File file = new File(myDir, fname); 126 | if (file.exists()) file.delete(); 127 | try { 128 | FileOutputStream out = new FileOutputStream(file); 129 | bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out); 130 | out.flush(); 131 | out.close(); 132 | } catch (Exception e) { 133 | e.printStackTrace(); 134 | } 135 | } 136 | }); 137 | } 138 | 139 | public static void shareText(Context context, @NonNull String str) { 140 | Intent sendIntent = new Intent(); 141 | sendIntent.setAction(Intent.ACTION_SEND); 142 | sendIntent.putExtra(Intent.EXTRA_TEXT, str); 143 | sendIntent.setType("text/plain"); 144 | context.startActivity(sendIntent); 145 | } 146 | 147 | public static String generateGooglePlayLink() { 148 | 149 | return String.format("https://play.google.com/store/apps/details?id=%s", 150 | RecognitionApp.getInstance().getPackageName()); 151 | } 152 | 153 | public static int getToolbarHeight() { 154 | Context ctx = RecognitionApp.getInstance().getApplicationContext(); 155 | TypedValue tv = new TypedValue(); 156 | if (ctx.getTheme().resolveAttribute(android.R.attr.actionBarSize, tv, true)) { 157 | return TypedValue.complexToDimensionPixelSize(tv.data, 158 | ctx.getResources().getDisplayMetrics()); 159 | } 160 | return 0; 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /app/src/main/java/org/dmlc/mxnet/MxnetException.java: -------------------------------------------------------------------------------- 1 | package org.dmlc.mxnet; 2 | 3 | public class MxnetException extends Exception { 4 | public MxnetException(){} 5 | public MxnetException(String txt) { 6 | super(txt); 7 | } 8 | } 9 | 10 | -------------------------------------------------------------------------------- /app/src/main/java/org/dmlc/mxnet/Predictor.java: -------------------------------------------------------------------------------- 1 | package org.dmlc.mxnet; 2 | 3 | public class Predictor { 4 | static { 5 | System.loadLibrary("mxnet_predict"); 6 | } 7 | 8 | public static class InputNode { 9 | String key; 10 | int[] shape; 11 | public InputNode(String key, int[] shape) { 12 | this.key = key; 13 | this.shape = shape; 14 | } 15 | } 16 | 17 | public static class Device { 18 | public enum Type { 19 | CPU, GPU, CPU_PINNED 20 | } 21 | 22 | public Device(Type t, int i) { 23 | this.type = t; 24 | this.id = i; 25 | } 26 | 27 | Type type; 28 | int id; 29 | int ctype() { 30 | return this.type == Type.CPU? 1: this.type == Type.GPU? 
2: 3; 31 | } 32 | } 33 | 34 | private long handle = 0; 35 | 36 | public Predictor(byte[] symbol, byte[] params, Device dev, InputNode[] input) { 37 | String[] keys = new String[input.length]; 38 | int[][] shapes = new int[input.length][]; 39 | for (int i=0; i 2 | 4 | 8 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /app/src/main/res/drawable/toast_backround.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 9 | 11 | 13 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 8 | 9 | 13 | 14 | 15 | 25 | 26 | 36 | 37 | 47 | 48 | 55 | 56 | 65 | 66 | 67 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_settings.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 8 | 9 | 13 | 14 | 15 | 26 | 27 | 37 | 38 | 48 | 49 |