├── .gitignore
├── 361camera
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── org
│       │           └── hunter
│       │               └── a361camera
│       │                   └── ExampleInstrumentedTest.java
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── java
│       │   │   └── org
│       │   │       └── hunter
│       │   │           └── a361camera
│       │   │               ├── base
│       │   │               │   └── CameraApplication.java
│       │   │               ├── camera
│       │   │               │   └── MainActivity.java
│       │   │               ├── util
│       │   │               │   ├── ActivityUtils.java
│       │   │               │   ├── AutoFocusHelper.java
│       │   │               │   ├── CameraUtil.java
│       │   │               │   └── LogUtil.java
│       │   │               ├── view
│       │   │               │   └── Camera2Fragment.java
│       │   │               └── widget
│       │   │                   ├── AutoFitTextureView.java
│       │   │                   └── PorterDuffImageView.java
│       │   └── res
│       │       ├── anim
│       │       │   └── scale.xml
│       │       ├── drawable
│       │       │   └── focus_area.xml
│       │       ├── layout-land
│       │       │   └── fragment_camera2.xml
│       │       ├── layout
│       │       │   ├── activity_main.xml
│       │       │   └── fragment_camera2.xml
│       │       ├── mipmap-hdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-mdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-xhdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-xxhdpi
│       │       │   ├── capture.png
│       │       │   ├── flash_auto.png
│       │       │   ├── flash_off.png
│       │       │   ├── flash_on.png
│       │       │   ├── hdr_off.png
│       │       │   ├── hdr_on.png
│       │       │   ├── ic_10s.png
│       │       │   ├── ic_3s.png
│       │       │   ├── ic_launcher.png
│       │       │   ├── switch_camera.png
│       │       │   └── timer.png
│       │       ├── mipmap-xxxhdpi
│       │       │   └── ic_launcher.png
│       │       ├── values-w820dp
│       │       │   └── dimens.xml
│       │       └── values
│       │           ├── colors.xml
│       │           ├── dimens.xml
│       │           ├── strings.xml
│       │           └── styles.xml
│       └── test
│           └── java
│               └── org
│                   └── hunter
│                       └── a361camera
│                           └── ExampleUnitTest.java
├── LICENSE
├── README.md
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.ap_
4 |
5 | # Files for the Dalvik VM
6 | *.dex
7 |
8 | # Java class files
9 | *.class
10 |
11 | # Generated files
12 | bin/
13 | gen/
14 |
15 | # Local configuration file (sdk path, etc)
16 | local.properties
17 |
18 | # OSX files
19 | .DS_Store
20 |
21 | # Android Studio
22 | *.iml
23 | .idea
24 |
25 | # Gradle files
26 | .gradle/
27 | build/
28 |
29 | # Local configuration file (sdk path, etc)
30 | local.properties
31 |
32 | # Proguard folder generated by Eclipse
33 | proguard/
34 |
35 | # Log Files
36 | *.log
--------------------------------------------------------------------------------
/361camera/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/361camera/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'com.neenbedankt.android-apt'
3 |
4 | android {
5 | compileSdkVersion 25
6 | buildToolsVersion "25.0.2"
7 | defaultConfig {
8 | applicationId "org.hunter.a361camera"
9 | minSdkVersion 21
10 | targetSdkVersion 25
11 | versionCode 1
12 | versionName "1.0"
13 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
14 | }
15 | buildTypes {
16 | release {
17 | minifyEnabled false
18 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
19 | }
20 | }
21 | }
22 |
23 | dependencies {
24 | compile fileTree(dir: 'libs', include: ['*.jar'])
25 | androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
26 | exclude group: 'com.android.support', module: 'support-annotations'
27 | })
28 | compile 'com.android.support:appcompat-v7:25.0.1'
29 | testCompile 'junit:junit:4.12'
30 | provided 'org.glassfish:javax.annotation:10.0-b28'
31 | compile 'com.google.guava:guava:19.0'
32 | compile "com.android.support:support-v13:25.0.1"
33 | }
34 |
--------------------------------------------------------------------------------
/361camera/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /Users/gengqifu/Library/Android/sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/361camera/src/androidTest/java/org/hunter/a361camera/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.assertEquals;
11 |
12 | /**
13 | * Instrumentation test, which will execute on an Android device.
14 | *
15 |  * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() throws Exception {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("org.hunter.a361camera", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/361camera/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 |     package="org.hunter.a361camera">
4 | 
5 |     <uses-permission android:name="android.permission.CAMERA" />
6 |     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
7 |     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
8 | 
9 |     <application
10 |         android:name=".base.CameraApplication"
11 |         android:allowBackup="true"
12 |         android:icon="@mipmap/ic_launcher"
13 |         android:label="@string/app_name"
14 |         android:supportsRtl="true"
15 |         android:theme="@style/AppTheme">
16 |         <activity android:name=".camera.MainActivity">
17 |             <intent-filter>
18 |                 <action android:name="android.intent.action.MAIN" />
19 | 
20 |                 <category android:name="android.intent.category.LAUNCHER" />
21 |             </intent-filter>
22 |         </activity>
23 |     </application>
24 | 
25 | </manifest>
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/base/CameraApplication.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.base;
2 |
3 | import android.app.Application;
4 |
5 | public class CameraApplication extends Application {
6 |
7 | @Override
8 | public void onCreate() {
9 | super.onCreate();
10 |
11 | }
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/camera/MainActivity.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.camera;
2 |
3 | import android.os.Bundle;
4 | import android.support.v7.app.AppCompatActivity;
5 |
6 | import org.hunter.a361camera.R;
7 | import org.hunter.a361camera.util.ActivityUtils;
8 | import org.hunter.a361camera.view.Camera2Fragment;
9 |
10 | public class MainActivity extends AppCompatActivity {
11 |
12 | @Override
13 | protected void onCreate(Bundle savedInstanceState) {
14 | super.onCreate(savedInstanceState);
15 | setContentView(R.layout.activity_main);
16 |
17 | Camera2Fragment camera2Fragment = (Camera2Fragment) getFragmentManager()
18 | .findFragmentById(R.id.contentFrame);
19 | if (camera2Fragment == null) {
20 | camera2Fragment = Camera2Fragment.newInstance();
21 | ActivityUtils.addFragmentToActivity(getFragmentManager(),
22 | camera2Fragment, R.id.contentFrame);
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/util/ActivityUtils.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.util;
2 |
3 | import android.app.Fragment;
4 | import android.app.FragmentManager;
5 | import android.app.FragmentTransaction;
6 | import android.support.annotation.NonNull;
7 |
8 | import static com.google.common.base.Preconditions.checkNotNull;
9 |
10 | /**
11 | * This provides methods to help Activities load their UI.
12 | */
13 | public class ActivityUtils {
14 |
15 | /**
16 | * The {@code fragment} is added to the container view with id {@code frameId}. The operation is
17 | * performed by the {@code fragmentManager}.
18 | */
19 | public static void addFragmentToActivity(@NonNull FragmentManager fragmentManager,
20 | @NonNull Fragment fragment, int frameId) {
21 | checkNotNull(fragmentManager);
22 | checkNotNull(fragment);
23 | FragmentTransaction transaction = fragmentManager.beginTransaction();
24 | transaction.add(frameId, fragment);
25 | transaction.commit();
26 | }
27 |
28 | }
29 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/util/AutoFocusHelper.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.util;
2 |
3 | import android.graphics.PointF;
4 | import android.graphics.Rect;
5 | import android.hardware.camera2.CameraCharacteristics;
6 | import android.hardware.camera2.params.MeteringRectangle;
7 |
8 | public class AutoFocusHelper {
9 | private static final MeteringRectangle[] ZERO_WEIGHT_3A_REGION = new MeteringRectangle[]{
10 | new MeteringRectangle(0, 0, 0, 0, 0)};
11 |
12 | private static final float REGION_WEIGHT = 0.022f;
13 |
14 | /**
15 | * Width of touch AF region in [0,1] relative to shorter edge of the current
16 | * crop region. Multiply this number by the number of pixels along the
17 | * shorter edge of the current crop region's width to get a value in pixels.
18 | *
19 | *
20 | * This value has been tested on Nexus 5 and Shamu, but will need to be
21 | * tuned per device depending on how its ISP interprets the metering box and weight.
22 | *
23 | *
24 | *
25 | * Values prior to L release:
26 | * Normal mode: 0.125 * longest edge
27 | * Gcam: Fixed at 300px x 300px.
28 | *
29 | */
30 | private static final float AF_REGION_BOX = 0.2f;
31 |
32 | /**
33 | * Width of touch metering region in [0,1] relative to shorter edge of the
34 | * current crop region. Multiply this number by the number of pixels along
35 | * shorter edge of the current crop region's width to get a value in pixels.
36 | *
37 | *
38 | * This value has been tested on Nexus 5 and Shamu, but will need to be
39 | * tuned per device depending on how its ISP interprets the metering box and weight.
40 | *
41 | *
42 | *
43 | * Values prior to L release:
44 | * Normal mode: 0.1875 * longest edge
45 | * Gcam: Fixed at 300px x 300px.
46 | *
47 | */
48 | private static final float AE_REGION_BOX = 0.3f;
49 |
50 | /**
51 | * camera2 API metering region weight.
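 * With METERING_WEIGHT_MIN = 0 and METERING_WEIGHT_MAX = 1000, the lerp below works out to a weight of 22.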
52 | */
53 | private static final int CAMERA2_REGION_WEIGHT = (int)
54 | (lerp(MeteringRectangle.METERING_WEIGHT_MIN, MeteringRectangle.METERING_WEIGHT_MAX,
55 | REGION_WEIGHT));
56 |
57 | private AutoFocusHelper() {
58 |
59 | }
60 |
61 | public static MeteringRectangle[] getZeroWeightRegion() {
62 | return ZERO_WEIGHT_3A_REGION;
63 | }
64 |
65 | public static float lerp(float a, float b, float t) {
66 | return a + t * (b - a);
67 | }
68 |
69 | /**
70 | * Calculates sensor crop region for a zoom level (zoom >= 1.0).
71 | *
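 * For example, with a 4000x3000 active array and zoom = 2.0, this returns a centered 2000x1500 rectangle.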
72 | * @return Crop region.
73 | */
74 | public static Rect cropRegionForZoom(CameraCharacteristics characteristics, float zoom) {
75 | Rect sensor = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
76 | int xCenter = sensor.width() / 2;
77 | int yCenter = sensor.height() / 2;
78 | int xDelta = (int) (0.5f * sensor.width() / zoom);
79 | int yDelta = (int) (0.5f * sensor.height() / zoom);
80 | return new Rect(xCenter - xDelta, yCenter - yDelta, xCenter + xDelta, yCenter + yDelta);
81 | }
82 |
83 | public static MeteringRectangle[] regionsForNormalizedCoord(float nx, float ny,
84 | float fraction, final Rect cropRegion, int sensorOrientation) {
85 | // Compute half side length in pixels.
86 | int minCropEdge = Math.min(cropRegion.width(), cropRegion.height());
87 | int halfSideLength = (int) (0.5f * fraction * minCropEdge);
88 |
89 | // Compute the output MeteringRectangle in sensor space.
90 | // nx, ny is normalized to the screen.
91 | // Crop region itself is specified in sensor coordinates.
92 |
93 | // Normalized coordinates, now rotated into sensor space.
94 | PointF nsc = CameraUtil.normalizedSensorCoordsForNormalizedDisplayCoords(
95 | nx, ny, sensorOrientation);
96 |
97 | int xCenterSensor = (int) (cropRegion.left + nsc.x * cropRegion.width());
98 | int yCenterSensor = (int) (cropRegion.top + nsc.y * cropRegion.height());
99 |
100 | Rect meteringRegion = new Rect(xCenterSensor - halfSideLength,
101 | yCenterSensor - halfSideLength,
102 | xCenterSensor + halfSideLength,
103 | yCenterSensor + halfSideLength);
104 |
105 | // Clamp meteringRegion to cropRegion.
106 | meteringRegion.left = CameraUtil.clamp(meteringRegion.left, cropRegion.left, cropRegion.right);
107 | meteringRegion.top = CameraUtil.clamp(meteringRegion.top, cropRegion.top, cropRegion.bottom);
108 | meteringRegion.right = CameraUtil.clamp(meteringRegion.right, cropRegion.left, cropRegion.right);
109 | meteringRegion.bottom = CameraUtil.clamp(meteringRegion.bottom, cropRegion.top, cropRegion.bottom);
110 |
111 | return new MeteringRectangle[]{new MeteringRectangle(meteringRegion, CAMERA2_REGION_WEIGHT)};
112 | }
113 |
114 | /**
115 | * Return AE region(s) for a sensor-referenced touch coordinate.
116 | *
117 | *
118 | * Normalized coordinates are referenced to portrait preview window with
119 | * (0, 0) top left and (1, 1) bottom right. Rotation has no effect.
120 | *
121 | *
122 | * @return AE region(s).
123 | */
124 | public static MeteringRectangle[] aeRegionsForNormalizedCoord(float nx,
125 | float ny, final Rect cropRegion, int sensorOrientation) {
126 | return regionsForNormalizedCoord(nx, ny, AE_REGION_BOX,
127 | cropRegion, sensorOrientation);
128 | }
129 |
130 | /**
131 | * Return AF region(s) for a sensor-referenced touch coordinate.
132 | *
133 | *
134 | * Normalized coordinates are referenced to portrait preview window with
135 | * (0, 0) top left and (1, 1) bottom right. Rotation has no effect.
136 | *
137 | *
138 | * @return AF region(s).
139 | */
140 | public static MeteringRectangle[] afRegionsForNormalizedCoord(float nx,
141 | float ny, final Rect cropRegion, int sensorOrientation) {
142 | return regionsForNormalizedCoord(nx, ny, AF_REGION_BOX,
143 | cropRegion, sensorOrientation);
144 | }
145 | }
146 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/util/CameraUtil.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.util;
2 |
3 | import android.graphics.PointF;
4 | import android.graphics.Rect;
5 | import android.hardware.camera2.CameraCharacteristics;
6 | import android.hardware.camera2.params.MeteringRectangle;
7 | import android.util.SparseIntArray;
8 | import android.view.Surface;
9 |
10 | public class CameraUtil {
11 | private static final String TAG = "CameraUtil";
12 |
13 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
14 | private static final int MAX_PREVIEW_WIDTH = 1920;
15 | private static final int MAX_PREVIEW_HEIGHT = 1080;
16 |
17 | private static final MeteringRectangle[] ZERO_WEIGHT_3A_REGION = new MeteringRectangle[]{
18 | new MeteringRectangle(0, 0, 0, 0, 0)};
19 |
20 | static {
21 | ORIENTATIONS.append(Surface.ROTATION_0, 90);
22 | ORIENTATIONS.append(Surface.ROTATION_90, 0);
23 | ORIENTATIONS.append(Surface.ROTATION_180, 270);
24 | ORIENTATIONS.append(Surface.ROTATION_270, 180);
25 | }
26 |
27 | private CameraUtil() {
28 |
29 | }
30 |
31 |
32 | private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
33 | Rect activeArraySize =
34 | characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
35 | return ((float) (activeArraySize.width())) / activeArraySize.height();
36 | }
37 |
38 | public static int getJPEGOrientation(int rotation, int sensorOrientation) {
39 | // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X)
40 | // We have to take that into account and rotate JPEG properly.
41 | // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS.
42 | // For devices with orientation of 270, we need to rotate the JPEG 180 degrees.
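// For example, with sensorOrientation = 90 and Surface.ROTATION_0 (mapped to 90 above),
// this evaluates to (90 + 90 + 270) % 360 = 90.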
43 | return (ORIENTATIONS.get(rotation) + sensorOrientation + 270) % 360;
44 | }
45 |
46 | public static int getJPEGOrientation(int rotation, CameraCharacteristics cameraCharacteristics) {
47 | Integer sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
48 | sensorOrientation = sensorOrientation == null ? 0 : sensorOrientation;
49 | // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X)
50 | // We have to take that into account and rotate JPEG properly.
51 | // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS.
52 | // For devices with orientation of 270, we need to rotate the JPEG 180 degrees.
53 | return (ORIENTATIONS.get(rotation) + sensorOrientation + 270) % 360;
54 | }
55 |
56 | public static PointF normalizedSensorCoordsForNormalizedDisplayCoords(
57 | float nx, float ny, int sensorOrientation) {
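// For example, with a 90-degree sensor, a tap at the top-right of the portrait preview
// (nx = 1, ny = 0) maps to normalized sensor coordinates (0, 0).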
58 | switch (sensorOrientation) {
59 | case 0:
60 | return new PointF(nx, ny);
61 | case 90:
62 | return new PointF(ny, 1.0f - nx);
63 | case 180:
64 | return new PointF(1.0f - nx, 1.0f - ny);
65 | case 270:
66 | return new PointF(1.0f - ny, nx);
67 | default:
68 | return null;
69 | }
70 | }
71 |
72 | /**
73 | * Clamps x to between min and max (inclusive on both ends, x = min --> min,
74 | * x = max --> max).
75 | */
76 | public static int clamp(int x, int min, int max) {
77 | if (x > max) {
78 | return max;
79 | }
80 | if (x < min) {
81 | return min;
82 | }
83 | return x;
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/util/LogUtil.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.util;
2 |
3 | import android.os.Environment;
4 | import android.text.TextUtils;
5 | import android.util.Log;
6 |
7 | import java.io.BufferedWriter;
8 | import java.io.File;
9 | import java.io.FileOutputStream;
10 | import java.io.IOException;
11 | import java.io.OutputStreamWriter;
12 | import java.text.SimpleDateFormat;
13 | import java.util.Date;
14 | import java.util.Formatter;
15 | import java.util.Locale;
16 |
17 | public class LogUtil {
18 |
19 | public static final String ROOT = Environment.getExternalStorageDirectory()
20 | .getPath() + "/7gz/";
21 | private static final boolean isSaveLog = true;
22 | private static final String PATH_LOG_INFO = ROOT + "7gz/";
23 | private static final ThreadLocal<ReusableFormatter> thread_local_formatter = new ThreadLocal<ReusableFormatter>() {
24 | protected ReusableFormatter initialValue() {
25 | return new ReusableFormatter();
26 | }
27 | };
28 | public static String customTagPrefix = "7gz";
29 | public static boolean allowD = true;
30 | public static boolean allowE = true;
31 | public static boolean allowI = true;
32 | public static boolean allowV = true;
33 | public static boolean allowW = true;
34 | public static boolean allowWtf = true;
35 | public static CustomLogger customLogger;
36 |
37 | private LogUtil() {
38 | }
39 |
40 | private static String generateTag(StackTraceElement caller) {
41 | String tag = "%s.%s(Line:%d)";
42 | String callerClazzName = caller.getClassName();
43 | callerClazzName = callerClazzName.substring(callerClazzName
44 | .lastIndexOf(".") + 1);
45 | tag = String.format(tag, callerClazzName, caller.getMethodName(),
46 | caller.getLineNumber());
47 | tag = TextUtils.isEmpty(customTagPrefix) ? tag : customTagPrefix + ":"
48 | + tag;
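// tag now has the form "7gz:ClassName.methodName(Line:N)".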
49 | return tag;
50 | }
51 |
52 | public static void d(String content) {
53 | if (!allowD)
54 | return;
55 | StackTraceElement caller = getCallerStackTraceElement();
56 | String tag = generateTag(caller);
57 |
58 | if (customLogger != null) {
59 | customLogger.d(tag, content);
60 | } else {
61 | Log.d(tag, content);
62 | }
63 | }
64 |
65 | public static void d(String content, Throwable tr) {
66 | if (!allowD)
67 | return;
68 | StackTraceElement caller = getCallerStackTraceElement();
69 | String tag = generateTag(caller);
70 |
71 | if (customLogger != null) {
72 | customLogger.d(tag, content, tr);
73 | } else {
74 | Log.d(tag, content, tr);
75 | }
76 | }
77 |
78 | public static void e(String content) {
79 | if (!allowE)
80 | return;
81 | StackTraceElement caller = getCallerStackTraceElement();
82 | String tag = generateTag(caller);
83 |
84 | if (customLogger != null) {
85 | customLogger.e(tag, content);
86 | } else {
87 | Log.e(tag, content);
88 | }
89 | if (isSaveLog) {
90 | point(PATH_LOG_INFO, tag, content);
91 | }
92 | }
93 |
94 | public static void e(String content, Throwable tr) {
95 | if (!allowE)
96 | return;
97 | StackTraceElement caller = getCallerStackTraceElement();
98 | String tag = generateTag(caller);
99 |
100 | if (customLogger != null) {
101 | customLogger.e(tag, content, tr);
102 | } else {
103 | Log.e(tag, content, tr);
104 | }
105 | if (isSaveLog) {
106 | point(PATH_LOG_INFO, tag, tr.getMessage());
107 | }
108 | }
109 |
110 | public static void i(String content) {
111 | if (!allowI)
112 | return;
113 | StackTraceElement caller = getCallerStackTraceElement();
114 | String tag = generateTag(caller);
115 |
116 | if (customLogger != null) {
117 | customLogger.i(tag, content);
118 | } else {
119 | Log.i(tag, content);
120 | }
121 |
122 | }
123 |
124 | public static void i(String content, Throwable tr) {
125 | if (!allowI)
126 | return;
127 | StackTraceElement caller = getCallerStackTraceElement();
128 | String tag = generateTag(caller);
129 |
130 | if (customLogger != null) {
131 | customLogger.i(tag, content, tr);
132 | } else {
133 | Log.i(tag, content, tr);
134 | }
135 |
136 | }
137 |
138 | public static void v(String content) {
139 | if (!allowV)
140 | return;
141 | StackTraceElement caller = getCallerStackTraceElement();
142 | String tag = generateTag(caller);
143 |
144 | if (customLogger != null) {
145 | customLogger.v(tag, content);
146 | } else {
147 | Log.v(tag, content);
148 | }
149 | }
150 |
151 | public static void v(String content, Throwable tr) {
152 | if (!allowV)
153 | return;
154 | StackTraceElement caller = getCallerStackTraceElement();
155 | String tag = generateTag(caller);
156 |
157 | if (customLogger != null) {
158 | customLogger.v(tag, content, tr);
159 | } else {
160 | Log.v(tag, content, tr);
161 | }
162 | }
163 |
164 | public static void w(String content) {
165 | if (!allowW)
166 | return;
167 | StackTraceElement caller = getCallerStackTraceElement();
168 | String tag = generateTag(caller);
169 |
170 | if (customLogger != null) {
171 | customLogger.w(tag, content);
172 | } else {
173 | Log.w(tag, content);
174 | }
175 | }
176 |
177 | public static void w(String content, Throwable tr) {
178 | if (!allowW)
179 | return;
180 | StackTraceElement caller = getCallerStackTraceElement();
181 | String tag = generateTag(caller);
182 |
183 | if (customLogger != null) {
184 | customLogger.w(tag, content, tr);
185 | } else {
186 | Log.w(tag, content, tr);
187 | }
188 | }
189 |
190 | public static void w(Throwable tr) {
191 | if (!allowW)
192 | return;
193 | StackTraceElement caller = getCallerStackTraceElement();
194 | String tag = generateTag(caller);
195 |
196 | if (customLogger != null) {
197 | customLogger.w(tag, tr);
198 | } else {
199 | Log.w(tag, tr);
200 | }
201 | }
202 |
203 | public static void wtf(String content) {
204 | if (!allowWtf)
205 | return;
206 | StackTraceElement caller = getCallerStackTraceElement();
207 | String tag = generateTag(caller);
208 |
209 | if (customLogger != null) {
210 | customLogger.wtf(tag, content);
211 | } else {
212 | Log.wtf(tag, content);
213 | }
214 | }
215 |
216 | public static void wtf(String content, Throwable tr) {
217 | if (!allowWtf)
218 | return;
219 | StackTraceElement caller = getCallerStackTraceElement();
220 | String tag = generateTag(caller);
221 |
222 | if (customLogger != null) {
223 | customLogger.wtf(tag, content, tr);
224 | } else {
225 | Log.wtf(tag, content, tr);
226 | }
227 | }
228 |
229 | public static void wtf(Throwable tr) {
230 | if (!allowWtf)
231 | return;
232 | StackTraceElement caller = getCallerStackTraceElement();
233 | String tag = generateTag(caller);
234 |
235 | if (customLogger != null) {
236 | customLogger.wtf(tag, tr);
237 | } else {
238 | Log.wtf(tag, tr);
239 | }
240 | }
241 |
242 | private static StackTraceElement getCallerStackTraceElement() {
243 | return Thread.currentThread().getStackTrace()[4];
244 | }
245 |
246 | public static void point(String path, String tag, String msg) {
247 | if (isSDAva()) {
248 | Date date = new Date();
249 | SimpleDateFormat dateFormat = new SimpleDateFormat("",
250 | Locale.SIMPLIFIED_CHINESE);
251 | dateFormat.applyPattern("yyyy");
252 | path = path + dateFormat.format(date) + "/";
253 | dateFormat.applyPattern("MM");
254 | path += dateFormat.format(date) + "/";
255 | dateFormat.applyPattern("dd");
256 | path += dateFormat.format(date) + ".log";
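// path now has the form PATH_LOG_INFO + "yyyy/MM/dd.log", i.e. one log file per day.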
257 | dateFormat.applyPattern("[yyyy-MM-dd HH:mm:ss]");
258 | String time = dateFormat.format(date);
259 | File file = new File(path);
260 | if (!file.exists())
261 | createDipPath(path);
262 | BufferedWriter out = null;
263 | try {
264 | out = new BufferedWriter(new OutputStreamWriter(
265 | new FileOutputStream(file, true)));
266 | out.write(time + " " + tag + " " + msg + "\r\n");
267 | } catch (Exception e) {
268 | e.printStackTrace();
269 | } finally {
270 | try {
271 | if (out != null) {
272 | out.close();
273 | }
274 | } catch (IOException e) {
275 | e.printStackTrace();
276 | }
277 | }
278 | }
279 | }
280 |
281 | public static void createDipPath(String file) {
282 | String parentFile = file.substring(0, file.lastIndexOf("/"));
283 | File file1 = new File(file);
284 | File parent = new File(parentFile);
285 | if (!file1.exists()) {
286 | parent.mkdirs();
287 | try {
288 | file1.createNewFile();
289 | } catch (IOException e) {
290 | e.printStackTrace();
291 | }
292 | }
293 | }
294 |
295 | public static String format(String msg, Object... args) {
296 | ReusableFormatter formatter = thread_local_formatter.get();
297 | return formatter.format(msg, args);
298 | }
299 |
300 | public static boolean isSDAva() {
301 | if (Environment.getExternalStorageState().equals(
302 | Environment.MEDIA_MOUNTED)
303 | || Environment.getExternalStorageDirectory().exists()) {
304 | return true;
305 | } else {
306 | return false;
307 | }
308 | }
309 |
310 | public interface CustomLogger {
311 | void d(String tag, String content);
312 |
313 | void d(String tag, String content, Throwable tr);
314 |
315 | void e(String tag, String content);
316 |
317 | void e(String tag, String content, Throwable tr);
318 |
319 | void i(String tag, String content);
320 |
321 | void i(String tag, String content, Throwable tr);
322 |
323 | void v(String tag, String content);
324 |
325 | void v(String tag, String content, Throwable tr);
326 |
327 | void w(String tag, String content);
328 |
329 | void w(String tag, String content, Throwable tr);
330 |
331 | void w(String tag, Throwable tr);
332 |
333 | void wtf(String tag, String content);
334 |
335 | void wtf(String tag, String content, Throwable tr);
336 |
337 | void wtf(String tag, Throwable tr);
338 | }
339 |
340 | private static class ReusableFormatter {
341 |
342 | private Formatter formatter;
343 | private StringBuilder builder;
344 |
345 | public ReusableFormatter() {
346 | builder = new StringBuilder();
347 | formatter = new Formatter(builder);
348 | }
349 |
350 | public String format(String msg, Object... args) {
351 | formatter.format(msg, args);
352 | String s = builder.toString();
353 | builder.setLength(0);
354 | return s;
355 | }
356 | }
357 |
358 | }
359 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/view/Camera2Fragment.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.view;
2 |
3 | import android.Manifest;
4 | import android.app.Activity;
5 | import android.app.AlertDialog;
6 | import android.app.Dialog;
7 | import android.app.DialogFragment;
8 | import android.app.Fragment;
9 | import android.content.Context;
10 | import android.content.DialogInterface;
11 | import android.content.pm.PackageManager;
12 | import android.graphics.Bitmap;
13 | import android.graphics.BitmapFactory;
14 | import android.graphics.ImageFormat;
15 | import android.graphics.Matrix;
16 | import android.graphics.Point;
17 | import android.graphics.Rect;
18 | import android.graphics.RectF;
19 | import android.graphics.SurfaceTexture;
20 | import android.hardware.SensorManager;
21 | import android.hardware.camera2.CameraAccessException;
22 | import android.hardware.camera2.CameraCaptureSession;
23 | import android.hardware.camera2.CameraCharacteristics;
24 | import android.hardware.camera2.CameraDevice;
25 | import android.hardware.camera2.CameraManager;
26 | import android.hardware.camera2.CameraMetadata;
27 | import android.hardware.camera2.CaptureFailure;
28 | import android.hardware.camera2.CaptureRequest;
29 | import android.hardware.camera2.CaptureResult;
30 | import android.hardware.camera2.DngCreator;
31 | import android.hardware.camera2.TotalCaptureResult;
32 | import android.hardware.camera2.params.MeteringRectangle;
33 | import android.hardware.camera2.params.StreamConfigurationMap;
34 | import android.media.Image;
35 | import android.media.ImageReader;
36 | import android.media.MediaScannerConnection;
37 | import android.net.Uri;
38 | import android.os.AsyncTask;
39 | import android.os.Bundle;
40 | import android.os.CountDownTimer;
41 | import android.os.Environment;
42 | import android.os.Handler;
43 | import android.os.HandlerThread;
44 | import android.os.Looper;
45 | import android.os.Message;
46 | import android.os.SystemClock;
47 | import android.support.v13.app.FragmentCompat;
48 | import android.support.v4.app.ActivityCompat;
49 | import android.support.v4.view.MotionEventCompat;
50 | import android.util.Log;
51 | import android.util.Size;
52 | import android.util.SparseIntArray;
53 | import android.view.LayoutInflater;
54 | import android.view.MotionEvent;
55 | import android.view.OrientationEventListener;
56 | import android.view.Surface;
57 | import android.view.TextureView;
58 | import android.view.View;
59 | import android.view.ViewGroup;
60 | import android.widget.ImageView;
61 | import android.widget.TextView;
62 | import android.widget.Toast;
63 |
64 | import org.hunter.a361camera.R;
65 | import org.hunter.a361camera.util.AutoFocusHelper;
66 | import org.hunter.a361camera.util.LogUtil;
67 | import org.hunter.a361camera.widget.AutoFitTextureView;
68 |
69 | import java.io.File;
70 | import java.io.FileOutputStream;
71 | import java.io.IOException;
72 | import java.io.OutputStream;
73 | import java.lang.ref.WeakReference;
74 | import java.nio.ByteBuffer;
75 | import java.text.SimpleDateFormat;
76 | import java.util.ArrayList;
77 | import java.util.Arrays;
78 | import java.util.Collections;
79 | import java.util.Comparator;
80 | import java.util.Date;
81 | import java.util.List;
82 | import java.util.Locale;
83 | import java.util.Map;
84 | import java.util.TreeMap;
85 | import java.util.concurrent.Semaphore;
86 | import java.util.concurrent.TimeUnit;
87 | import java.util.concurrent.atomic.AtomicInteger;
88 |
89 | public class Camera2Fragment extends Fragment
90 | implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback {
91 | public static final String CAMERA_FRONT = "1";
92 | public static final String CAMERA_BACK = "0";
93 | public static final int TIME_INTERVAL = 1000;
94 | public static final int IMAGE_SHOW = 100;
95 | public static final int FOCUS_HIDE = 101;
96 | /**
97 | * Conversion from screen rotation to JPEG orientation.
98 | */
99 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
100 | /**
101 | * Request code for camera permissions.
102 | */
103 | private static final int REQUEST_CAMERA_PERMISSIONS = 1;
104 |
105 | /**
106 | * Permissions required to take a picture.
107 | */
108 | private static final String[] CAMERA_PERMISSIONS = {
109 | Manifest.permission.CAMERA,
110 | Manifest.permission.READ_EXTERNAL_STORAGE,
111 | Manifest.permission.WRITE_EXTERNAL_STORAGE,
112 | };
113 |
114 | /**
115 | * Timeout for the pre-capture sequence.
116 | */
117 | private static final long PRECAPTURE_TIMEOUT_MS = 1000;
118 |
119 | /**
120 | * Tolerance when comparing aspect ratios.
121 | */
122 | private static final double ASPECT_RATIO_TOLERANCE = 0.005;
123 |
124 | /**
125 | * Max preview width that is guaranteed by Camera2 API
126 | */
127 | private static final int MAX_PREVIEW_WIDTH = 1920;
128 |
129 | /**
130 | * Max preview height that is guaranteed by Camera2 API
131 | */
132 | private static final int MAX_PREVIEW_HEIGHT = 1080;
133 |
134 | /**
135 | * Tag for the {@link Log}.
136 | */
137 | private static final String TAG = "Camera2RawFragment";
138 |
139 | /**
140 | * Camera state: Device is closed.
141 | */
142 | private static final int STATE_CLOSED = 0;
143 |
144 | /**
145 | * Camera state: Device is opened, but is not capturing.
146 | */
147 | private static final int STATE_OPENED = 1;
148 |
149 | /**
150 | * Camera state: Showing camera preview.
151 | */
152 | private static final int STATE_PREVIEW = 2;
153 |
154 | /**
155 | * Camera state: Waiting for 3A convergence before capturing a photo.
156 | */
157 | private static final int STATE_WAITING_FOR_3A_CONVERGENCE = 3;
158 |
159 | // the time needed to resume continuous focus mode after we tap to focus
160 | private static final int DELAY_TIME_RESUME_CONTINUOUS_AF = 1000;
161 |
162 | private static Camera2Fragment INSTANCE;
163 | private static Camera2Handler mHandler;
164 |
165 | static {
166 | ORIENTATIONS.append(Surface.ROTATION_0, 0);
167 | ORIENTATIONS.append(Surface.ROTATION_90, 90);
168 | ORIENTATIONS.append(Surface.ROTATION_180, 180);
169 | ORIENTATIONS.append(Surface.ROTATION_270, 270);
170 | }
171 |
172 | /**
173 | * A counter for tracking corresponding {@link CaptureRequest}s and {@link CaptureResult}s
174 | * across the {@link CameraCaptureSession} capture callbacks.
175 | */
176 | private final AtomicInteger mRequestCounter = new AtomicInteger();
177 | /**
178 | * A {@link Semaphore} to prevent the app from exiting before closing the camera.
179 | */
180 | private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
181 | /**
182 | * A lock protecting camera state.
183 | */
184 | private final Object mCameraStateLock = new Object();
185 | /**
186 | * Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress JPEG captures.
187 | */
188 | private final TreeMap<Integer, ImageSaver.ImageSaverBuilder> mJpegResultQueue = new TreeMap<>();
189 | /**
190 | * Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress RAW captures.
191 | */
192 | private final TreeMap<Integer, ImageSaver.ImageSaverBuilder> mRawResultQueue = new TreeMap<>();
193 | /**
194 | * A {@link Handler} for showing {@link Toast}s on the UI thread.
195 | */
196 | private final Handler mMessageHandler = new Handler(Looper.getMainLooper()) {
197 | @Override
198 | public void handleMessage(Message msg) {
199 | Activity activity = getActivity();
200 | if (activity != null) {
201 | Toast.makeText(activity, (String) msg.obj, Toast.LENGTH_SHORT).show();
202 | }
203 | }
204 | };
205 |
206 | // *********************************************************************************************
207 | // State protected by mCameraStateLock.
208 | //
209 | // The following state is used across both the UI and background threads. Methods with "Locked"
210 | // in the name expect mCameraStateLock to be held while calling.
211 | /**
212 | * An {@link OrientationEventListener} used to determine when device rotation has occurred.
213 | * This is mainly necessary for when the device is rotated by 180 degrees, in which case
214 | * onCreate or onConfigurationChanged is not called as the view dimensions remain the same,
215 | * but the orientation of the display has changed, and thus the preview rotation must be updated.
216 | */
217 | private OrientationEventListener mOrientationListener;
218 | /**
219 | * An {@link AutoFitTextureView} for camera preview.
220 | */
221 | private AutoFitTextureView mTextureView;
222 | private ImageView mImageShow;
223 | private ImageView mTimer;
224 | private TextView mTimeText;
225 | private ImageView mFlashBtn;
226 | private ImageView mIvFocus;
227 | private ImageView mIvHdr;
228 | // the current surface used for preview
229 | private Surface mPreviewSurface;
230 | // current auto focus mode; when we tap to focus, the mode will switch to auto
231 | private AutoFocusMode mControlAFMode = AutoFocusMode.CONTINUOUS_PICTURE;
232 | //focus zero region
233 | private static final MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper.getZeroWeightRegion();
234 | private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
235 | private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;
236 | /*
237 | * HDR mode, 0 represents hdr off, while 1 represents hdr on.
238 | */
239 | private int mHdrMode;
240 |
241 | enum AutoFocusMode {
242 | /**
243 | * System is continuously focusing.
244 | */
245 | CONTINUOUS_PICTURE,
246 | /**
247 | * System is running a triggered scan.
248 | */
249 | AUTO;
250 |
251 | int switchToCamera2FocusMode() {
252 | switch (this) {
253 | case CONTINUOUS_PICTURE:
254 | return CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
255 | case AUTO:
256 | return CameraMetadata.CONTROL_AF_MODE_AUTO;
257 | default:
258 | return CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
259 | }
260 | }
261 | }
262 |
263 | /*
264 | * Delay state, 0 represents no delay, 1 represents 3s delay, while 2 represents 10s delay
265 | */
266 | private short mDelayState = 0; // Timer
267 | private short mDelayTime;
268 | /*
269 | * Flash mode, 0 represents off, 1 represents auto, while 2 represents on
270 | */
271 | private short mFlashMode = 1;
272 | /**
273 | * An additional thread for running tasks that shouldn't block the UI. This is used for all
274 | * callbacks from the {@link CameraDevice} and {@link CameraCaptureSession}s.
275 | */
276 | private HandlerThread mBackgroundThread;
277 | /**
278 | * ID of the current {@link CameraDevice}.
279 | */
280 | private String mCameraId = CAMERA_BACK; // Default back camera
281 | /**
282 | * A {@link CameraCaptureSession} for camera preview.
283 | */
284 | private CameraCaptureSession mCaptureSession;
285 | /**
286 | * A reference to the open {@link CameraDevice}.
287 | */
288 | private CameraDevice mCameraDevice;
289 | /**
290 | * The {@link Size} of camera preview.
291 | */
292 | private Size mPreviewSize;
293 | /**
294 | * The {@link CameraCharacteristics} for the currently configured camera device.
295 | */
296 | private CameraCharacteristics mCharacteristics;
297 | /**
298 | * A {@link Handler} for running tasks in the background.
299 | */
300 | private Handler mBackgroundHandler;
301 | /**
302 | * A reference counted holder wrapping the {@link ImageReader} that handles JPEG image
303 | * captures. This is used to allow us to clean up the {@link ImageReader} when all background
304 | * tasks using its {@link Image}s have completed.
305 | */
306 | private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
307 | /**
308 | * This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
309 | * JPEG image is ready to be saved.
310 | */
311 | private final ImageReader.OnImageAvailableListener mOnJpegImageAvailableListener
312 | = new ImageReader.OnImageAvailableListener() {
313 |
314 | @Override
315 | public void onImageAvailable(ImageReader reader) {
316 | dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
317 | }
318 |
319 | };
320 | /**
321 | * A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
322 | * This is used to allow us to clean up the {@link ImageReader} when all background tasks using
323 | * its {@link Image}s have completed.
324 | */
325 | private RefCountedAutoCloseable<ImageReader> mRawImageReader;
326 | /**
327 | * This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
328 | * RAW image is ready to be saved.
329 | */
330 | private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener
331 | = new ImageReader.OnImageAvailableListener() {
332 |
333 | @Override
334 | public void onImageAvailable(ImageReader reader) {
335 | dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
336 | }
337 |
338 | };
339 | /**
340 | * Whether or not the currently configured camera device is fixed-focus.
341 | */
342 | private boolean mNoAFRun = false;
343 |
344 | //**********************************************************************************************
345 | /**
346 | * Number of pending user requests to capture a photo.
347 | */
348 | private int mPendingUserCaptures = 0;
349 | /**
350 | * {@link CaptureRequest.Builder} for the camera preview
351 | */
352 | private CaptureRequest.Builder mPreviewRequestBuilder;
353 | /**
354 | * The state of the camera device.
355 | *
356 | * @see #mPreCaptureCallback
357 | */
358 | private int mState = STATE_CLOSED;
359 | /**
360 | * Timer to use with pre-capture sequence to ensure a timely capture if 3A convergence is
361 | * taking too long.
362 | */
363 | private long mCaptureTimer;
364 | /**
365 | * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
366 | * pre-capture sequence.
367 | */
368 | private CameraCaptureSession.CaptureCallback mPreCaptureCallback
369 | = new CameraCaptureSession.CaptureCallback() {
370 |
371 | private void process(CaptureResult result) {
372 | synchronized (mCameraStateLock) {
373 | switch (mState) {
374 | case STATE_PREVIEW: {
375 | // We have nothing to do when the camera preview is running normally.
376 | break;
377 | }
378 | case STATE_WAITING_FOR_3A_CONVERGENCE: {
379 | boolean readyToCapture = true;
380 | if (!mNoAFRun) {
381 | Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
382 | if (afState == null) {
383 | break;
384 | }
385 |
386 | // If auto-focus has reached locked state, we are ready to capture
387 | readyToCapture =
388 | (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ||
389 | afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
390 | }
391 |
392 | // If we are running on a non-legacy device, we should also wait until
393 | // auto-exposure and auto-white-balance have converged as well before
394 | // taking a picture.
395 | if (!isLegacyLocked()) {
396 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
397 | Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
398 | if (aeState == null || awbState == null) {
399 | break;
400 | }
401 |
402 | readyToCapture = readyToCapture &&
403 | aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED &&
404 | awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED;
405 | }
406 |
407 | // If we haven't finished the pre-capture sequence but have hit our maximum
408 | // wait timeout, too bad! Begin capture anyway.
409 | if (!readyToCapture && hitTimeoutLocked()) {
410 | Log.w(TAG, "Timed out waiting for pre-capture sequence to complete.");
411 | readyToCapture = true;
412 | }
413 |
414 | if (readyToCapture && mPendingUserCaptures > 0) {
415 | // Capture once for each user tap of the "Picture" button.
416 | while (mPendingUserCaptures > 0) {
417 | captureStillPictureLocked();
418 | mPendingUserCaptures--;
419 | }
420 | // After this, the camera will go back to the normal state of preview.
421 | mState = STATE_PREVIEW;
422 | }
423 | }
424 | }
425 | }
426 | }
427 |
428 | @Override
429 | public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
430 | CaptureResult partialResult) {
431 | process(partialResult);
432 | }
433 |
434 | @Override
435 | public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
436 | TotalCaptureResult result) {
437 | process(result);
438 | }
439 |
440 | };
441 | /**
442 | * {@link TextureView.SurfaceTextureListener} handles several lifecycle events of a
443 | * {@link TextureView}.
444 | */
445 | private final TextureView.SurfaceTextureListener mSurfaceTextureListener
446 | = new TextureView.SurfaceTextureListener() {
447 |
448 | @Override
449 | public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
450 | configureTransform(width, height);
451 | }
452 |
453 | @Override
454 | public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
455 | configureTransform(width, height);
456 | }
457 |
458 | @Override
459 | public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
460 | synchronized (mCameraStateLock) {
461 | mPreviewSize = null;
462 | }
463 | return true;
464 | }
465 |
466 | @Override
467 | public void onSurfaceTextureUpdated(SurfaceTexture texture) {
468 | }
469 |
470 | };
471 | /**
472 | * {@link CameraDevice.StateCallback} is called when the currently active {@link CameraDevice}
473 | * changes its state.
474 | */
475 | private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
476 |
477 | @Override
478 | public void onOpened(CameraDevice cameraDevice) {
479 | // This method is called when the camera is opened. We start camera preview here if
480 | // the TextureView displaying this has been set up.
481 | synchronized (mCameraStateLock) {
482 | mState = STATE_OPENED;
483 | mCameraOpenCloseLock.release();
484 | mCameraDevice = cameraDevice;
485 |
486 | // Start the preview session if the TextureView has been set up already.
487 | if (mPreviewSize != null && mTextureView.isAvailable()) {
488 | createCameraPreviewSessionLocked();
489 | }
490 | }
491 | }
492 |
493 | @Override
494 | public void onDisconnected(CameraDevice cameraDevice) {
495 | synchronized (mCameraStateLock) {
496 | mState = STATE_CLOSED;
497 | mCameraOpenCloseLock.release();
498 | cameraDevice.close();
499 | mCameraDevice = null;
500 | }
501 | }
502 |
503 | @Override
504 | public void onError(CameraDevice cameraDevice, int error) {
505 | Log.e(TAG, "Received camera device error: " + error);
506 | synchronized (mCameraStateLock) {
507 | mState = STATE_CLOSED;
508 | mCameraOpenCloseLock.release();
509 | cameraDevice.close();
510 | mCameraDevice = null;
511 | }
512 | Activity activity = getActivity();
513 | if (null != activity) {
514 | activity.finish();
515 | }
516 | }
517 |
518 | };
519 | /**
520 | * A {@link CameraCaptureSession.CaptureCallback} that handles the still JPEG and RAW capture
521 | * request.
522 | */
523 | private final CameraCaptureSession.CaptureCallback mCaptureCallback
524 | = new CameraCaptureSession.CaptureCallback() {
525 | @Override
526 | public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
527 | long timestamp, long frameNumber) {
528 | String currentDateTime = generateTimestamp();
529 | File rawFile = new File(Environment.
530 | getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),
531 | "RAW_" + currentDateTime + ".dng");
532 | File jpegFile = new File(Environment.
533 | getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),
534 | "JPEG_" + currentDateTime + ".jpg");
535 |
536 | // Look up the ImageSaverBuilder for this request and update it with the file name
537 | // based on the capture start time.
538 | ImageSaver.ImageSaverBuilder jpegBuilder;
539 | ImageSaver.ImageSaverBuilder rawBuilder;
540 | int requestId = (int) request.getTag();
541 | synchronized (mCameraStateLock) {
542 | jpegBuilder = mJpegResultQueue.get(requestId);
543 | rawBuilder = mRawResultQueue.get(requestId);
544 | }
545 |
546 | if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
547 | if (rawBuilder != null) rawBuilder.setFile(rawFile);
548 | }
549 |
550 | @Override
551 | public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
552 | TotalCaptureResult result) {
553 | int requestId = (int) request.getTag();
554 | ImageSaver.ImageSaverBuilder jpegBuilder;
555 | ImageSaver.ImageSaverBuilder rawBuilder;
556 | StringBuilder sb = new StringBuilder();
557 |
558 | // Look up the ImageSaverBuilder for this request and update it with the CaptureResult
559 | synchronized (mCameraStateLock) {
560 | jpegBuilder = mJpegResultQueue.get(requestId);
561 | rawBuilder = mRawResultQueue.get(requestId);
562 |
563 | if (jpegBuilder != null) {
564 | jpegBuilder.setResult(result);
565 | sb.append("Saving JPEG as: ");
566 | sb.append(jpegBuilder.getSaveLocation());
567 | }
568 | if (rawBuilder != null) {
569 | rawBuilder.setResult(result);
570 | if (jpegBuilder != null) sb.append(", ");
571 | sb.append("Saving RAW as: ");
572 | sb.append(rawBuilder.getSaveLocation());
573 | }
574 |
575 | // If we have all the results necessary, save the image to a file in the background.
576 | handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
577 | handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
578 |
579 | finishedCaptureLocked();
580 | }
581 |
582 | showToast(sb.toString());
583 | }
584 |
585 | @Override
586 | public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
587 | CaptureFailure failure) {
588 | int requestId = (int) request.getTag();
589 | synchronized (mCameraStateLock) {
590 | mJpegResultQueue.remove(requestId);
591 | mRawResultQueue.remove(requestId);
592 | finishedCaptureLocked();
593 | }
594 | showToast("Capture failed!");
595 | }
596 |
597 | };
598 |
599 | public static Camera2Fragment newInstance() {
600 | INSTANCE = new Camera2Fragment();
601 | mHandler = new Camera2Handler(INSTANCE);
602 | return INSTANCE;
603 | }
604 |
605 | /**
606 | * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
607 | * is at least as large as the respective texture view size, and that is at most as large as the
608 | * respective max size, and whose aspect ratio matches with the specified value. If such size
609 | * doesn't exist, choose the largest one that is at most as large as the respective max size,
610 | * and whose aspect ratio matches with the specified value.
611 | *
612 | * @param choices The list of sizes that the camera supports for the intended output
613 | * class
614 | * @param textureViewWidth The width of the texture view relative to sensor coordinate
615 | * @param textureViewHeight The height of the texture view relative to sensor coordinate
616 | * @param maxWidth The maximum width that can be chosen
617 | * @param maxHeight The maximum height that can be chosen
618 | * @param aspectRatio The aspect ratio
619 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough
620 | */
621 | private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
622 | int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
623 | // Collect the supported resolutions that are at least as big as the preview Surface
624 | List<Size> bigEnough = new ArrayList<>();
625 | // Collect the supported resolutions that are smaller than the preview Surface
626 | List<Size> notBigEnough = new ArrayList<>();
627 | int w = aspectRatio.getWidth();
628 | int h = aspectRatio.getHeight();
629 | for (Size option : choices) {
630 | if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
631 | option.getHeight() == option.getWidth() * h / w) {
632 | if (option.getWidth() >= textureViewWidth &&
633 | option.getHeight() >= textureViewHeight) {
634 | bigEnough.add(option);
635 | } else {
636 | notBigEnough.add(option);
637 | }
638 | }
639 | }
640 |
641 | // Pick the smallest of those big enough. If there is no one big enough, pick the
642 | // largest of those not big enough.
643 | if (bigEnough.size() > 0) {
644 | return Collections.min(bigEnough, new CompareSizesByArea());
645 | } else if (notBigEnough.size() > 0) {
646 | return Collections.max(notBigEnough, new CompareSizesByArea());
647 | } else {
648 | Log.e(TAG, "Couldn't find any suitable preview size");
649 | return choices[0];
650 | }
651 | }
652 |
653 | /**
654 | * Generate a string containing a formatted timestamp with the current date and time.
655 | *
656 | * @return a {@link String} representing a time.
657 | */
658 | private static String generateTimestamp() {
659 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US);
660 | return sdf.format(new Date());
661 | }
662 |
663 | /**
664 | * Cleanup the given {@link OutputStream}.
665 | *
666 | * @param outputStream the stream to close.
667 | */
668 | private static void closeOutput(OutputStream outputStream) {
669 | if (null != outputStream) {
670 | try {
671 | outputStream.close();
672 | } catch (IOException e) {
673 | e.printStackTrace();
674 | }
675 | }
676 | }
677 |
678 | /**
679 | * Return true if the given array contains the given integer.
680 | *
681 | * @param modes array to check.
682 | * @param mode  integer to check for.
683 | * @return true if the array contains the given integer, otherwise false.
684 | */
685 | private static boolean contains(int[] modes, int mode) {
686 | if (modes == null) {
687 | return false;
688 | }
689 | for (int i : modes) {
690 | if (i == mode) {
691 | return true;
692 | }
693 | }
694 | return false;
695 | }
696 |
697 | /**
698 | * Return true if the two given {@link Size}s have the same aspect ratio.
699 | *
700 | * @param a first {@link Size} to compare.
701 | * @param b second {@link Size} to compare.
702 | * @return true if the sizes have the same aspect ratio, otherwise false.
703 | */
704 | private static boolean checkAspectsEqual(Size a, Size b) {
705 | double aAspect = a.getWidth() / (double) a.getHeight();
706 | double bAspect = b.getWidth() / (double) b.getHeight();
707 | return Math.abs(aAspect - bAspect) <= ASPECT_RATIO_TOLERANCE;
708 | }
709 |
710 | /**
711 | * Rotation needed to transform from the camera sensor orientation to the device's current
712 | * orientation.
713 | *
714 | * @param c the {@link CameraCharacteristics} to query for the camera sensor
715 | * orientation.
716 | * @param deviceOrientation the current device orientation relative to the native device
717 | * orientation.
718 | * @return the total rotation from the sensor orientation to the current device orientation.
719 | */
720 | private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation) {
721 | int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
722 |
723 | // Get device orientation in degrees
724 | deviceOrientation = ORIENTATIONS.get(deviceOrientation);
725 |
726 | // Reverse device orientation for front-facing cameras
727 | if (c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
728 | deviceOrientation = -deviceOrientation;
729 | }
730 |
731 | // Calculate desired JPEG orientation relative to camera orientation to make
732 | // the image upright relative to the device orientation
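// For example, a back camera with sensorOrientation = 90 and the device at ROTATION_0
// (mapped to 0 above) gives (90 + 0 + 360) % 360 = 90.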
733 | return (sensorOrientation + deviceOrientation + 360) % 360;
734 | }
735 |
736 | @Override
737 | public View onCreateView(LayoutInflater inflater, ViewGroup container,
738 | Bundle savedInstanceState) {
739 | return inflater.inflate(R.layout.fragment_camera2, container, false);
740 | }
741 |
742 | @Override
743 | public void onViewCreated(final View view, Bundle savedInstanceState) {
744 | view.findViewById(R.id.capture).setOnClickListener(this);
745 | view.findViewById(R.id.switch_camera).setOnClickListener(this);
746 | mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture_view_camera2);
747 | mImageShow = (ImageView) view.findViewById(R.id.iv_show_camera2);
748 | mTimer = (ImageView) view.findViewById(R.id.timer);
749 | mTimeText = (TextView) view.findViewById(R.id.timer_text);
750 | mFlashBtn = (ImageView) view.findViewById(R.id.flash);
751 | mIvFocus = (ImageView) view.findViewById(R.id.iv_focus);
752 | mIvHdr = (ImageView) view.findViewById(R.id.hdr);
753 | mTimer.setOnClickListener(this);
754 | mFlashBtn.setOnClickListener(this);
755 | mIvHdr.setOnClickListener(this);
756 |
757 | mTextureView.setOnTouchListener(new View.OnTouchListener() {
758 | @Override
759 | public boolean onTouch(View v, MotionEvent event) {
760 | int actionMasked = MotionEventCompat.getActionMasked(event);
761 | int fingerX, fingerY;
762 | int length = (int) (getResources().getDisplayMetrics().density * 80);
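// length is the edge of the focus indicator in pixels (80dp scaled by the display density);
// it is used below to center mIvFocus on the touch point.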
763 | switch (actionMasked) {
764 | case MotionEvent.ACTION_DOWN:
765 | fingerX = (int) event.getX();
766 | fingerY = (int) event.getY();
767 | LogUtil.d("onTouch: x->" + fingerX + ",y->" + fingerY);
768 |
769 | mIvFocus.setX(fingerX - length / 2);
770 | mIvFocus.setY(fingerY - length / 2);
771 |
772 | mIvFocus.setVisibility(View.VISIBLE);
773 | triggerFocusArea(fingerX, fingerY);
774 |
775 | break;
776 | }
777 |
778 | return false;
779 | }
780 | });
781 |
782 | // Setup a new OrientationEventListener. This is used to handle rotation events like a
783 | // 180 degree rotation that do not normally trigger a call to onCreate to do view re-layout
784 | // or otherwise cause the preview TextureView's size to change.
785 | mOrientationListener = new OrientationEventListener(getActivity(),
786 | SensorManager.SENSOR_DELAY_NORMAL) {
787 | @Override
788 | public void onOrientationChanged(int orientation) {
789 | if (mTextureView != null && mTextureView.isAvailable()) {
790 | configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
791 | }
792 | }
793 | };
794 | }
795 |
796 | @Override
797 | public void onResume() {
798 | super.onResume();
799 | startBackgroundThread();
800 | openCamera();
801 |
802 | // When the screen is turned off and turned back on, the SurfaceTexture is already
803 | // available, and "onSurfaceTextureAvailable" will not be called. In that case, we should
804 | // configure the preview bounds here (otherwise, we wait until the surface is ready in
805 | // the SurfaceTextureListener).
806 | if (mTextureView.isAvailable()) {
807 | configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
808 | } else {
809 | mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
810 | }
811 | if (mOrientationListener != null && mOrientationListener.canDetectOrientation()) {
812 | mOrientationListener.enable();
813 | }
814 | }
815 |
816 | @Override
817 | public void onPause() {
818 | if (mOrientationListener != null) {
819 | mOrientationListener.disable();
820 | }
821 | closeCamera();
822 | stopBackgroundThread();
823 | super.onPause();
824 | }
825 |
826 | @Override
827 | public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
828 | if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
829 | for (int result : grantResults) {
830 | if (result != PackageManager.PERMISSION_GRANTED) {
831 | showMissingPermissionError();
832 | return;
833 | }
834 | }
835 | } else {
836 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
837 | }
838 | }
839 |
840 | private void triggerFocusArea(float x, float y) {
841 | CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
842 | try {
843 | CameraCharacteristics characteristics
844 | = manager.getCameraCharacteristics(mCameraId);
845 | Integer sensorOrientation = characteristics.get(
846 | CameraCharacteristics.SENSOR_ORIENTATION);
847 |
848 | sensorOrientation = sensorOrientation == null ? 0 : sensorOrientation;
849 |
850 | Rect cropRegion = AutoFocusHelper.cropRegionForZoom(characteristics, 1f);
851 | mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(x, y, cropRegion, sensorOrientation);
852 | mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(x, y, cropRegion, sensorOrientation);
853 |
854 | // Step 1: Request single frame CONTROL_AF_TRIGGER_START.
855 | CaptureRequest.Builder builder;
856 | builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
857 | builder.addTarget(mPreviewSurface);
858 | builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
859 |
860 | mControlAFMode = AutoFocusMode.AUTO;
861 |
862 | builder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode.switchToCamera2FocusMode());
863 | builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
864 | mCaptureSession.capture(builder.build(), mPreCaptureCallback, mBackgroundHandler);
865 |
866 | // Step 2: Call repeatingPreview to update mControlAFMode.
867 | sendRepeatPreviewRequest();
868 | resumeContinuousAFAfterDelay(DELAY_TIME_RESUME_CONTINUOUS_AF);
869 | } catch (CameraAccessException ex) {
870 | Log.e(TAG, "Could not execute preview request.", ex);
871 | }
872 | }
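// Tap-to-focus, in short: a one-shot AUTO-mode request with CONTROL_AF_TRIGGER_START starts an
// AF scan around the touched point, the repeating preview is then rebuilt with the new AF mode,
// and after DELAY_TIME_RESUME_CONTINUOUS_AF the runnable below falls back to CONTINUOUS_PICTURE
// focus with zero-weight metering regions.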
873 |
874 |     private void resumeContinuousAFAfterDelay(int timeMillis) {
875 |         mBackgroundHandler.removeCallbacks(mResumePreviewRunnable);
876 |         mBackgroundHandler.postDelayed(mResumePreviewRunnable, timeMillis);
877 |     }
878 |
879 |     // The runnable that resumes continuous focus mode after tap-to-focus.
880 | private Runnable mResumePreviewRunnable = new Runnable() {
881 | @Override
882 | public void run() {
883 | mAERegions = ZERO_WEIGHT_3A_REGION;
884 | mAFRegions = ZERO_WEIGHT_3A_REGION;
885 | mControlAFMode = AutoFocusMode.CONTINUOUS_PICTURE;
886 | if (mCameraDevice != null)
887 | sendRepeatPreviewRequest();
888 | Message msg = Message.obtain();
889 | mHandler.sendEmptyMessage(FOCUS_HIDE);
890 | }
891 | };
892 |
893 | private boolean sendRepeatPreviewRequest() {
894 | try {
895 | CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
896 | builder.addTarget(mPreviewSurface);
897 | builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
898 | builder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode.switchToCamera2FocusMode());
899 |
900 | mCaptureSession.setRepeatingRequest(builder.build(), mPreCaptureCallback, mBackgroundHandler);
901 | return true;
902 | } catch (CameraAccessException e) {
903 | e.printStackTrace();
904 | return false;
905 | }
906 | }
907 |
908 | @Override
909 | public void onClick(View view) {
910 | switch (view.getId()) {
911 | case R.id.capture: {
912 | if (mDelayState == 0) {
913 | takePicture();
914 | } else {
915 | new CountDownTimer(mDelayTime, TIME_INTERVAL) {
916 | @Override
917 | public void onTick(long millisUntilFinished) {
918 | mTimeText.setVisibility(View.VISIBLE);
919 | mTimeText.setText("" + millisUntilFinished / TIME_INTERVAL);
920 | }
921 |
922 | @Override
923 | public void onFinish() {
924 | mTimeText.setVisibility(View.GONE);
925 | takePicture();
926 | }
927 | }.start();
928 | }
929 | break;
930 | }
931 | case R.id.switch_camera: {
932 | switchCamera();
933 | break;
934 | }
935 | case R.id.timer:
936 | switchDelayState();
937 | break;
938 | case R.id.flash:
939 | switchFlashMode();
940 | break;
941 | case R.id.hdr:
942 | switchHdrMode();
943 | break;
944 | }
945 | }
946 |
947 | private void switchHdrMode() {
948 | switch (mHdrMode) {
949 | case 0:
950 | mHdrMode = 1;
951 | mIvHdr.setImageResource(R.mipmap.hdr_on);
952 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_HDR);
953 | break;
954 | case 1:
955 | mHdrMode = 0;
956 | mIvHdr.setImageResource(R.mipmap.hdr_off);
957 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_DISABLED);
958 | break;
959 | }
960 | try {
961 | mCaptureSession.setRepeatingRequest(
962 | mPreviewRequestBuilder.build(),
963 | mPreCaptureCallback, mBackgroundHandler);
964 | } catch (CameraAccessException e) {
965 | e.printStackTrace();
966 | return;
967 | }
968 | }
969 |
970 | private void setFlashMode() {
971 | switch (mFlashMode) {
972 | case 0:
973 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
974 | mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
975 | break;
976 | case 1:
977 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
978 | break;
979 | case 2:
980 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
981 | break;
982 | }
983 | }
984 |
985 | private void switchFlashMode() {
986 | switch (mFlashMode) {
987 | case 0:
988 | mFlashMode = 1;
989 | mFlashBtn.setImageResource(R.mipmap.flash_auto);
990 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
991 | try {
992 | mCaptureSession.setRepeatingRequest(
993 | mPreviewRequestBuilder.build(),
994 | mPreCaptureCallback, mBackgroundHandler);
995 | } catch (CameraAccessException e) {
996 | e.printStackTrace();
997 | return;
998 | }
999 |
1000 | break;
1001 | case 1:
1002 | mFlashMode = 2;
1003 | mFlashBtn.setImageResource(R.mipmap.flash_on);
1004 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
1005 | try {
1006 | mCaptureSession.setRepeatingRequest(
1007 | mPreviewRequestBuilder.build(),
1008 | mPreCaptureCallback, mBackgroundHandler);
1009 | } catch (CameraAccessException e) {
1010 | e.printStackTrace();
1011 | return;
1012 | }
1013 | break;
1014 | case 2:
1015 | mFlashMode = 0;
1016 | mFlashBtn.setImageResource(R.mipmap.flash_off);
1017 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
1018 | mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
1019 | try {
1020 | mCaptureSession.setRepeatingRequest(
1021 | mPreviewRequestBuilder.build(),
1022 | mPreCaptureCallback, mBackgroundHandler);
1023 | } catch (CameraAccessException e) {
1024 | e.printStackTrace();
1025 | return;
1026 | }
1027 | break;
1028 | }
1029 | }
1030 |
1031 | private void switchDelayState() {
1032 | switch (mDelayState) {
1033 | case 0:
1034 | mTimer.setImageResource(R.mipmap.ic_3s);
1035 | mDelayTime = 3 * 1000;
1036 | mDelayState = 1;
1037 | break;
1038 | case 1:
1039 | mTimer.setImageResource(R.mipmap.ic_10s);
1040 | mDelayTime = 10 * 1000;
1041 | mDelayState = 2;
1042 | break;
1043 | case 2:
1044 | mTimer.setImageResource(R.mipmap.timer);
1045 | mDelayTime = 0;
1046 | mDelayState = 0;
1047 | break;
1048 | default:
1049 | break;
1050 | }
1051 | }
1052 |
1053 | public void switchCamera() {
1054 | if (mCameraId.equals(CAMERA_FRONT)) {
1055 | mCameraId = CAMERA_BACK;
1056 | closeCamera();
1057 | reopenCamera();
1058 |
1059 | } else if (mCameraId.equals(CAMERA_BACK)) {
1060 | mCameraId = CAMERA_FRONT;
1061 | closeCamera();
1062 | reopenCamera();
1063 | }
1064 | }
1065 |
1066 | public void reopenCamera() {
1067 | if (mTextureView.isAvailable()) {
1068 | openCamera();
1069 | } else {
1070 | mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
1071 | }
1072 | }
1073 |
1074 | /**
1075 | * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
1076 | */
1077 | private boolean setUpCameraOutputs() {
1078 | Activity activity = getActivity();
1079 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
1080 | if (manager == null) {
1081 | ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").
1082 | show(getFragmentManager(), "dialog");
1083 | return false;
1084 | }
1085 | try {
1086 | // Find a CameraDevice that supports RAW captures, and configure state.
1087 | for (String cameraId : manager.getCameraIdList()) {
1088 | CameraCharacteristics characteristics
1089 | = manager.getCameraCharacteristics(cameraId);
1090 |
1091 |                 if ((!cameraId.equals(CAMERA_FRONT) && !cameraId.equals(CAMERA_BACK))
1092 |                         || !cameraId.equals(mCameraId)) {
1093 | continue;
1094 | }
1095 | // We only use a camera that supports RAW in this sample.
1096 | if (!contains(characteristics.get(
1097 | CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
1098 | CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
1099 | continue;
1100 | }
1101 |
1102 | StreamConfigurationMap map = characteristics.get(
1103 | CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1104 |
1105 | // For still image captures, we use the largest available size.
1106 | Size largestJpeg = Collections.max(
1107 | Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
1108 | new CompareSizesByArea());
1109 |
1110 | Size largestRaw = Collections.max(
1111 | Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
1112 | new CompareSizesByArea());
1113 |
1114 | synchronized (mCameraStateLock) {
1115 | // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
1116 | // counted wrapper to ensure they are only closed when all background tasks
1117 | // using them are finished.
1118 | if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
1119 | mJpegImageReader = new RefCountedAutoCloseable<>(
1120 | ImageReader.newInstance(largestJpeg.getWidth(),
1121 | largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
1122 | }
1123 | mJpegImageReader.get().setOnImageAvailableListener(
1124 | mOnJpegImageAvailableListener, mBackgroundHandler);
1125 |
1126 | if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
1127 | mRawImageReader = new RefCountedAutoCloseable<>(
1128 | ImageReader.newInstance(largestRaw.getWidth(),
1129 | largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
1130 | }
1131 | mRawImageReader.get().setOnImageAvailableListener(
1132 | mOnRawImageAvailableListener, mBackgroundHandler);
1133 |
1134 | mCharacteristics = characteristics;
1135 | }
1136 | return true;
1137 | }
1138 | } catch (CameraAccessException e) {
1139 | e.printStackTrace();
1140 | }
1141 |
1142 | // If we found no suitable cameras for capturing RAW, warn the user.
1143 | ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").
1144 | show(getFragmentManager(), "dialog");
1145 | return false;
1146 | }
1147 |
1148 | /**
1149 | * Opens the camera specified by {@link #mCameraId}.
1150 | */
1151 | @SuppressWarnings("MissingPermission")
1152 | private void openCamera() {
1153 | if (!setUpCameraOutputs()) {
1154 | return;
1155 | }
1156 | if (!hasAllPermissionsGranted()) {
1157 | requestCameraPermissions();
1158 | return;
1159 | }
1160 |
1161 | Activity activity = getActivity();
1162 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
1163 | try {
1164 | // Wait for any previously running session to finish.
1165 | if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
1166 | throw new RuntimeException("Time out waiting to lock camera opening.");
1167 | }
1168 |
1169 | String cameraId;
1170 | Handler backgroundHandler;
1171 | synchronized (mCameraStateLock) {
1172 | cameraId = mCameraId;
1173 | backgroundHandler = mBackgroundHandler;
1174 | }
1175 |
1176 | // Attempt to open the camera. mStateCallback will be called on the background handler's
1177 | // thread when this succeeds or fails.
1178 | manager.openCamera(cameraId, mStateCallback, backgroundHandler);
1179 | } catch (CameraAccessException e) {
1180 | e.printStackTrace();
1181 | } catch (InterruptedException e) {
1182 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
1183 | }
1184 | }
1185 |
1186 | /**
1187 | * Requests permissions necessary to use camera and save pictures.
1188 | */
1189 | private void requestCameraPermissions() {
1190 | if (shouldShowRationale()) {
1191 | PermissionConfirmationDialog.newInstance().show(getChildFragmentManager(), "dialog");
1192 | } else {
1193 | FragmentCompat.requestPermissions(this, CAMERA_PERMISSIONS, REQUEST_CAMERA_PERMISSIONS);
1194 | }
1195 | }
1196 |
1197 | /**
1198 | * Tells whether all the necessary permissions are granted to this app.
1199 | *
1200 | * @return True if all the required permissions are granted.
1201 | */
1202 | private boolean hasAllPermissionsGranted() {
1203 | for (String permission : CAMERA_PERMISSIONS) {
1204 | if (ActivityCompat.checkSelfPermission(getActivity(), permission)
1205 | != PackageManager.PERMISSION_GRANTED) {
1206 | return false;
1207 | }
1208 | }
1209 | return true;
1210 | }
1211 |
1212 | /**
1213 | * Gets whether you should show UI with rationale for requesting the permissions.
1214 | *
1215 | * @return True if the UI should be shown.
1216 | */
1217 | private boolean shouldShowRationale() {
1218 | for (String permission : CAMERA_PERMISSIONS) {
1219 | if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
1220 | return true;
1221 | }
1222 | }
1223 | return false;
1224 | }
1225 |
1226 | /**
1227 | * Shows that this app really needs the permission and finishes the app.
1228 | */
1229 | private void showMissingPermissionError() {
1230 | Activity activity = getActivity();
1231 | if (activity != null) {
1232 | Toast.makeText(activity, R.string.request_permission, Toast.LENGTH_SHORT).show();
1233 | activity.finish();
1234 | }
1235 | }
1236 |
1237 | /**
1238 | * Closes the current {@link CameraDevice}.
1239 | */
1240 | private void closeCamera() {
1241 | try {
1242 | mCameraOpenCloseLock.acquire();
1243 | synchronized (mCameraStateLock) {
1244 |
1245 | // Reset state and clean up resources used by the camera.
1246 | // Note: After calling this, the ImageReaders will be closed after any background
1247 | // tasks saving Images from these readers have been completed.
1248 | mPendingUserCaptures = 0;
1249 | mState = STATE_CLOSED;
1250 | if (null != mCaptureSession) {
1251 | mCaptureSession.close();
1252 | mCaptureSession = null;
1253 | }
1254 | if (null != mCameraDevice) {
1255 | mCameraDevice.close();
1256 | mCameraDevice = null;
1257 | }
1258 | if (null != mJpegImageReader) {
1259 | mJpegImageReader.close();
1260 | mJpegImageReader = null;
1261 | }
1262 | if (null != mRawImageReader) {
1263 | mRawImageReader.close();
1264 | mRawImageReader = null;
1265 | }
1266 | }
1267 | } catch (InterruptedException e) {
1268 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
1269 | } finally {
1270 | mCameraOpenCloseLock.release();
1271 | }
1272 | }
1273 |
1274 | /**
1275 | * Starts a background thread and its {@link Handler}.
1276 | */
1277 | private void startBackgroundThread() {
1278 | mBackgroundThread = new HandlerThread("CameraBackground");
1279 | mBackgroundThread.start();
1280 | synchronized (mCameraStateLock) {
1281 | mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
1282 | }
1283 | }
1284 |
1285 | /**
1286 | * Stops the background thread and its {@link Handler}.
1287 | */
1288 | private void stopBackgroundThread() {
1289 | mBackgroundThread.quitSafely();
1290 | try {
1291 | mBackgroundThread.join();
1292 | mBackgroundThread = null;
1293 | synchronized (mCameraStateLock) {
1294 | mBackgroundHandler = null;
1295 | }
1296 | } catch (InterruptedException e) {
1297 | e.printStackTrace();
1298 | }
1299 | }
1300 |
1301 | // Utility classes and methods:
1302 | // *********************************************************************************************
1303 |
1304 | /**
1305 | * Creates a new {@link CameraCaptureSession} for camera preview.
1306 | *
1307 | * Call this only with {@link #mCameraStateLock} held.
1308 | */
1309 | private void createCameraPreviewSessionLocked() {
1310 | try {
1311 | SurfaceTexture texture = mTextureView.getSurfaceTexture();
1312 | // We configure the size of default buffer to be the size of camera preview we want.
1313 | texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
1314 |
1315 | // This is the output Surface we need to start preview.
1316 | mPreviewSurface = new Surface(texture);
1317 |
1318 | // We set up a CaptureRequest.Builder with the output Surface.
1319 | mPreviewRequestBuilder
1320 | = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1321 | mPreviewRequestBuilder.addTarget(mPreviewSurface);
1322 |
1323 | // Here, we create a CameraCaptureSession for camera preview.
1324 | mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface,
1325 | mJpegImageReader.get().getSurface(),
1326 | mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
1327 | @Override
1328 | public void onConfigured(CameraCaptureSession cameraCaptureSession) {
1329 | synchronized (mCameraStateLock) {
1330 | // The camera is already closed
1331 | if (null == mCameraDevice) {
1332 | return;
1333 | }
1334 |
1335 | try {
1336 | setup3AControlsLocked(mPreviewRequestBuilder);
1337 | // Default hdr off
1338 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_DISABLED);
1339 | // Finally, we start displaying the camera preview.
1340 | cameraCaptureSession.setRepeatingRequest(
1341 | mPreviewRequestBuilder.build(),
1342 | mPreCaptureCallback, mBackgroundHandler);
1343 | mState = STATE_PREVIEW;
1344 | } catch (CameraAccessException | IllegalStateException e) {
1345 | e.printStackTrace();
1346 | return;
1347 | }
1348 | // When the session is ready, we start displaying the preview.
1349 | mCaptureSession = cameraCaptureSession;
1350 | }
1351 | }
1352 |
1353 | @Override
1354 | public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
1355 | showToast("Failed to configure camera.");
1356 | }
1357 | }, mBackgroundHandler
1358 | );
1359 | } catch (CameraAccessException e) {
1360 | e.printStackTrace();
1361 | }
1362 | }
1363 |
1364 | /**
1365 | * Configure the given {@link CaptureRequest.Builder} to use auto-focus, auto-exposure, and
1366 | * auto-white-balance controls if available.
1367 | *
1368 | * Call this only with {@link #mCameraStateLock} held.
1369 | *
1370 | * @param builder the builder to configure.
1371 | */
1372 | private void setup3AControlsLocked(CaptureRequest.Builder builder) {
1373 | // Enable auto-magical 3A run by camera device
1374 | builder.set(CaptureRequest.CONTROL_MODE,
1375 | CaptureRequest.CONTROL_MODE_AUTO);
1376 |
1377 | Float minFocusDist =
1378 | mCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
1379 |
1380 | // If MINIMUM_FOCUS_DISTANCE is 0, lens is fixed-focus and we need to skip the AF run.
1381 | mNoAFRun = (minFocusDist == null || minFocusDist == 0);
1382 |
1383 | if (!mNoAFRun) {
1384 | // If there is a "continuous picture" mode available, use it, otherwise default to AUTO.
1385 | if (contains(mCharacteristics.get(
1386 | CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES),
1387 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
1388 | builder.set(CaptureRequest.CONTROL_AF_MODE,
1389 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
1390 | } else {
1391 | builder.set(CaptureRequest.CONTROL_AF_MODE,
1392 | CaptureRequest.CONTROL_AF_MODE_AUTO);
1393 | }
1394 | }
1395 |
1396 | // If there is an auto-magical flash control mode available, use it, otherwise default to
1397 | // the "on" mode, which is guaranteed to always be available.
1398 | if (contains(mCharacteristics.get(
1399 | CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES),
1400 | CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)) {
1401 | builder.set(CaptureRequest.CONTROL_AE_MODE,
1402 | CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
1403 | } else {
1404 | builder.set(CaptureRequest.CONTROL_AE_MODE,
1405 | CaptureRequest.CONTROL_AE_MODE_ON);
1406 | }
1407 |
1408 | // If there is an auto-magical white balance control mode available, use it.
1409 | if (contains(mCharacteristics.get(
1410 | CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES),
1411 | CaptureRequest.CONTROL_AWB_MODE_AUTO)) {
1412 | // Allow AWB to run auto-magically if this device supports this
1413 | builder.set(CaptureRequest.CONTROL_AWB_MODE,
1414 | CaptureRequest.CONTROL_AWB_MODE_AUTO);
1415 | }
1416 | }
1417 |
1418 | /**
1419 | * Configure the necessary {@link android.graphics.Matrix} transformation to `mTextureView`,
1420 | * and start/restart the preview capture session if necessary.
1421 | *
1422 | * This method should be called after the camera state has been initialized in
1423 | * setUpCameraOutputs.
1424 | *
1425 | * @param viewWidth The width of `mTextureView`
1426 | * @param viewHeight The height of `mTextureView`
1427 | */
1428 | private void configureTransform(int viewWidth, int viewHeight) {
1429 | Activity activity = getActivity();
1430 | synchronized (mCameraStateLock) {
1431 | if (null == mTextureView || null == activity) {
1432 | return;
1433 | }
1434 |
1435 | StreamConfigurationMap map = mCharacteristics.get(
1436 | CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1437 |
1438 | // For still image captures, we always use the largest available size.
1439 | Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
1440 | new CompareSizesByArea());
1441 |
1442 | // Find the rotation of the device relative to the native device orientation.
1443 | int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
1444 | Point displaySize = new Point();
1445 | activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
1446 |
1447 | // Find the rotation of the device relative to the camera sensor's orientation.
1448 | int totalRotation = sensorToDeviceRotation(mCharacteristics, deviceRotation);
1449 |
1450 | // Swap the view dimensions for calculation as needed if they are rotated relative to
1451 | // the sensor.
1452 | boolean swappedDimensions = totalRotation == 90 || totalRotation == 270;
1453 | int rotatedViewWidth = viewWidth;
1454 | int rotatedViewHeight = viewHeight;
1455 | int maxPreviewWidth = displaySize.x;
1456 | int maxPreviewHeight = displaySize.y;
1457 |
1458 | if (swappedDimensions) {
1459 | rotatedViewWidth = viewHeight;
1460 | rotatedViewHeight = viewWidth;
1461 | maxPreviewWidth = displaySize.y;
1462 | maxPreviewHeight = displaySize.x;
1463 | }
1464 |
1465 | // Preview should not be larger than display size and 1080p.
1466 | if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
1467 | maxPreviewWidth = MAX_PREVIEW_WIDTH;
1468 | }
1469 |
1470 | if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
1471 | maxPreviewHeight = MAX_PREVIEW_HEIGHT;
1472 | }
1473 |
1474 | // Find the best preview size for these view dimensions and configured JPEG size.
1475 | Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
1476 | rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight,
1477 | largestJpeg);
1478 |
1479 | if (swappedDimensions) {
1480 | mTextureView.setAspectRatio(
1481 | previewSize.getHeight(), previewSize.getWidth());
1482 | } else {
1483 | mTextureView.setAspectRatio(
1484 | previewSize.getWidth(), previewSize.getHeight());
1485 | }
1486 |
1487 | // Find rotation of device in degrees (reverse device orientation for front-facing
1488 | // cameras).
1489 | int rotation = (mCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
1490 | CameraCharacteristics.LENS_FACING_FRONT) ?
1491 | (360 + ORIENTATIONS.get(deviceRotation)) % 360 :
1492 | (360 - ORIENTATIONS.get(deviceRotation)) % 360;
1493 |
1494 | Matrix matrix = new Matrix();
1495 | RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
1496 | RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
1497 | float centerX = viewRect.centerX();
1498 | float centerY = viewRect.centerY();
1499 |
1500 | // Initially, output stream images from the Camera2 API will be rotated to the native
1501 | // device orientation from the sensor's orientation, and the TextureView will default to
1502 |             // scaling these buffers to fill its view bounds. If the aspect ratios and relative
1503 | // orientations are correct, this is fine.
1504 | //
1505 | // However, if the device orientation has been rotated relative to its native
1506 | // orientation so that the TextureView's dimensions are swapped relative to the
1507 | // native device orientation, we must do the following to ensure the output stream
1508 | // images are not incorrectly scaled by the TextureView:
1509 | // - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
1510 | // in the native device orientation) to the TextureView's dimension.
1511 | // - Apply a scale-to-fill from the output buffer's rotated dimensions
1512 | // (i.e. its dimensions in the current device orientation) to the TextureView's
1513 | // dimensions.
1514 | // - Apply the rotation from the native device orientation to the current device
1515 | // rotation.
1516 | if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
1517 | bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
1518 | matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
1519 | float scale = Math.max(
1520 | (float) viewHeight / previewSize.getHeight(),
1521 | (float) viewWidth / previewSize.getWidth());
1522 | matrix.postScale(scale, scale, centerX, centerY);
1523 |
1524 | }
1525 | matrix.postRotate(rotation, centerX, centerY);
1526 |
1527 | mTextureView.setTransform(matrix);
1528 |
1529 | // Start or restart the active capture session if the preview was initialized or
1530 | // if its aspect ratio changed significantly.
1531 | if (mPreviewSize == null || !checkAspectsEqual(previewSize, mPreviewSize)) {
1532 | mPreviewSize = previewSize;
1533 | if (mState != STATE_CLOSED) {
1534 | createCameraPreviewSessionLocked();
1535 | }
1536 | }
1537 | }
1538 | }
1539 |
1540 | /**
1541 | * Initiate a still image capture.
1542 | *
1543 | * This function sends a capture request that initiates a pre-capture sequence in our state
1544 | * machine that waits for auto-focus to finish, ending in a "locked" state where the lens is no
1545 | * longer moving, waits for auto-exposure to choose a good exposure value, and waits for
1546 | * auto-white-balance to converge.
1547 | */
1548 | private void takePicture() {
1549 | synchronized (mCameraStateLock) {
1550 | mPendingUserCaptures++;
1551 |
1552 | // If we already triggered a pre-capture sequence, or are in a state where we cannot
1553 | // do this, return immediately.
1554 | if (mState != STATE_PREVIEW) {
1555 | return;
1556 | }
1557 |
1558 | try {
1559 | // Trigger an auto-focus run if camera is capable. If the camera is already focused,
1560 | // this should do nothing.
1561 | if (!mNoAFRun) {
1562 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
1563 | CameraMetadata.CONTROL_AF_TRIGGER_START);
1564 | }
1565 |
1566 | // If this is not a legacy device, we can also trigger an auto-exposure metering
1567 | // run.
1568 | if (!isLegacyLocked()) {
1569 |                     // Trigger an auto-exposure precapture metering sequence.
1570 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
1571 | CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
1572 | }
1573 |
1574 | // Update state machine to wait for auto-focus, auto-exposure, and
1575 | // auto-white-balance (aka. "3A") to converge.
1576 | mState = STATE_WAITING_FOR_3A_CONVERGENCE;
1577 |
1578 | // Start a timer for the pre-capture sequence.
1579 | startTimerLocked();
1580 |
1581 | // Set flash mode
1582 | setFlashMode();
1583 | // Replace the existing repeating request with one with updated 3A triggers.
1584 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback,
1585 | mBackgroundHandler);
1586 | } catch (CameraAccessException e) {
1587 | e.printStackTrace();
1588 | }
1589 | }
1590 | }
1591 |
1592 | /**
1593 | * Send a capture request to the camera device that initiates a capture targeting the JPEG and
1594 | * RAW outputs.
1595 | *
1596 | * Call this only with {@link #mCameraStateLock} held.
1597 | */
1598 | private void captureStillPictureLocked() {
1599 | try {
1600 | final Activity activity = getActivity();
1601 | if (null == activity || null == mCameraDevice) {
1602 | return;
1603 | }
1604 | // This is the CaptureRequest.Builder that we use to take a picture.
1605 | final CaptureRequest.Builder captureBuilder =
1606 | mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
1607 |
1608 | captureBuilder.addTarget(mJpegImageReader.get().getSurface());
1609 | captureBuilder.addTarget(mRawImageReader.get().getSurface());
1610 |
1611 | // Use the same AE and AF modes as the preview.
1612 | setup3AControlsLocked(captureBuilder);
1613 |
1614 | // Set orientation.
1615 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
1616 | captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,
1617 | sensorToDeviceRotation(mCharacteristics, rotation));
1618 |
1619 | // Set request tag to easily track results in callbacks.
1620 | captureBuilder.setTag(mRequestCounter.getAndIncrement());
1621 |
1622 | CaptureRequest request = captureBuilder.build();
1623 |
1624 | // Create an ImageSaverBuilder in which to collect results, and add it to the queue
1625 | // of active requests.
1626 | ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity)
1627 | .setCharacteristics(mCharacteristics);
1628 | ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity)
1629 | .setCharacteristics(mCharacteristics);
1630 |
1631 | mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
1632 | mRawResultQueue.put((int) request.getTag(), rawBuilder);
1633 |
1634 | mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
1635 |
1636 | } catch (CameraAccessException e) {
1637 | e.printStackTrace();
1638 | }
1639 | }
1640 |
1641 | /**
1642 | * Called after a RAW/JPEG capture has completed; resets the AF trigger state for the
1643 | * pre-capture sequence.
1644 | *
1645 | * Call this only with {@link #mCameraStateLock} held.
1646 | */
1647 | private void finishedCaptureLocked() {
1648 | try {
1649 | // Reset the auto-focus trigger in case AF didn't run quickly enough.
1650 | if (!mNoAFRun) {
1651 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
1652 | CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
1653 |
1654 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback,
1655 | mBackgroundHandler);
1656 |
1657 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
1658 | CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
1659 | }
1660 | } catch (CameraAccessException e) {
1661 | e.printStackTrace();
1662 | }
1663 | }
1664 |
1665 | /**
1666 | * Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
1667 | * that {@link ImageReader} until that {@link Image} is no longer in use, and set this
1668 | * {@link Image} as the result for the next request in the queue of pending requests. If
1669 | * all necessary information is available, begin saving the image to a file in a background
1670 | * thread.
1671 | *
1672 | * @param pendingQueue the currently active requests.
1673 | * @param reader a reference counted wrapper containing an {@link ImageReader} from which
1674 | * to acquire an image.
1675 | */
1676 |     private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue,
1677 |                                      RefCountedAutoCloseable<ImageReader> reader) {
1678 | synchronized (mCameraStateLock) {
1679 |             Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry =
1680 |                     pendingQueue.firstEntry();
1681 | ImageSaver.ImageSaverBuilder builder = entry.getValue();
1682 |
1683 | // Increment reference count to prevent ImageReader from being closed while we
1684 | // are saving its Images in a background thread (otherwise their resources may
1685 | // be freed while we are writing to a file).
1686 | if (reader == null || reader.getAndRetain() == null) {
1687 | Log.e(TAG, "Paused the activity before we could save the image," +
1688 | " ImageReader already closed.");
1689 | pendingQueue.remove(entry.getKey());
1690 | return;
1691 | }
1692 |
1693 | Image image;
1694 | try {
1695 | image = reader.get().acquireNextImage();
1696 | } catch (IllegalStateException e) {
1697 | Log.e(TAG, "Too many images queued for saving, dropping image for request: " +
1698 | entry.getKey());
1699 | pendingQueue.remove(entry.getKey());
1700 | return;
1701 | }
1702 |
1703 | builder.setRefCountedReader(reader).setImage(image);
1704 |
1705 | handleCompletionLocked(entry.getKey(), builder, pendingQueue);
1706 | }
1707 | }
1708 |
1709 | /**
1710 | * Shows a {@link Toast} on the UI thread.
1711 | *
1712 | * @param text The message to show.
1713 | */
1714 | private void showToast(String text) {
1715 | // We show a Toast by sending request message to mMessageHandler. This makes sure that the
1716 | // Toast is shown on the UI thread.
1717 | Message message = Message.obtain();
1718 | message.obj = text;
1719 | mMessageHandler.sendMessage(message);
1720 | }
1721 |
1722 | /**
1723 | * If the given request has been completed, remove it from the queue of active requests and
1724 | * send an {@link ImageSaver} with the results from this request to a background thread to
1725 | * save a file.
1726 | *
1727 | * Call this only with {@link #mCameraStateLock} held.
1728 | *
1729 | * @param requestId the ID of the {@link CaptureRequest} to handle.
1730 | * @param builder the {@link ImageSaver.ImageSaverBuilder} for this request.
1731 | * @param queue the queue to remove this request from, if completed.
1732 | */
1733 | private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder,
1734 | TreeMap queue) {
1735 | if (builder == null) return;
1736 | ImageSaver saver = builder.buildIfComplete();
1737 | if (saver != null) {
1738 | queue.remove(requestId);
1739 | AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
1740 | }
1741 | }
1742 |
1743 | /**
1744 | * Check if we are using a device that only supports the LEGACY hardware level.
1745 | *
1746 | * Call this only with {@link #mCameraStateLock} held.
1747 | *
1748 | * @return true if this is a legacy device.
1749 | */
1750 | private boolean isLegacyLocked() {
1751 | return mCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) ==
1752 | CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
1753 | }
1754 |
1755 | /**
1756 | * Start the timer for the pre-capture sequence.
1757 | *
1758 | * Call this only with {@link #mCameraStateLock} held.
1759 | */
1760 | private void startTimerLocked() {
1761 | mCaptureTimer = SystemClock.elapsedRealtime();
1762 | }
1763 |
1764 | /**
1765 | * Check if the timer for the pre-capture sequence has been hit.
1766 | *
1767 | * Call this only with {@link #mCameraStateLock} held.
1768 | *
1769 | * @return true if the timeout occurred.
1770 | */
1771 | private boolean hitTimeoutLocked() {
1772 | return (SystemClock.elapsedRealtime() - mCaptureTimer) > PRECAPTURE_TIMEOUT_MS;
1773 | }
1774 |
1775 |     private void showImage(Bitmap bitmap) {
1776 | mImageShow.setImageBitmap(bitmap);
1777 | }
1778 |
1779 | /**
1780 | * Runnable that saves an {@link Image} into the specified {@link File}, and updates
1781 | * {@link android.provider.MediaStore} to include the resulting file.
1782 | *
1783 | * This can be constructed through an {@link ImageSaverBuilder} as the necessary image and
1784 | * result information becomes available.
1785 | */
1786 | private static class ImageSaver implements Runnable {
1787 |
1788 | /**
1789 | * The image to save.
1790 | */
1791 | private final Image mImage;
1792 | /**
1793 | * The file we save the image into.
1794 | */
1795 | private final File mFile;
1796 |
1797 | /**
1798 | * The CaptureResult for this image capture.
1799 | */
1800 | private final CaptureResult mCaptureResult;
1801 |
1802 | /**
1803 | * The CameraCharacteristics for this camera device.
1804 | */
1805 | private final CameraCharacteristics mCharacteristics;
1806 |
1807 | /**
1808 | * The Context to use when updating MediaStore with the saved images.
1809 | */
1810 | private final Context mContext;
1811 |
1812 | /**
1813 | * A reference counted wrapper for the ImageReader that owns the given image.
1814 | */
1815 |         private final RefCountedAutoCloseable<ImageReader> mReader;
1816 |
1817 | private ImageSaver(Image image, File file, CaptureResult result,
1818 | CameraCharacteristics characteristics, Context context,
1819 |                            RefCountedAutoCloseable<ImageReader> reader) {
1820 | mImage = image;
1821 | mFile = file;
1822 | mCaptureResult = result;
1823 | mCharacteristics = characteristics;
1824 | mContext = context;
1825 | mReader = reader;
1826 | }
1827 |
1828 | @Override
1829 | public void run() {
1830 | boolean success = false;
1831 | int format = mImage.getFormat();
1832 | switch (format) {
1833 | case ImageFormat.JPEG: {
1834 | ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
1835 | byte[] bytes = new byte[buffer.remaining()];
1836 | buffer.get(bytes);
1837 | final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
1838 | Message msg = Message.obtain();
1839 | msg.what = IMAGE_SHOW;
1840 | msg.obj = bitmap;
1841 | mHandler.sendMessage(msg);
1842 | FileOutputStream output = null;
1843 | try {
1844 | output = new FileOutputStream(mFile);
1845 | output.write(bytes);
1846 | success = true;
1847 | } catch (IOException e) {
1848 | e.printStackTrace();
1849 | } finally {
1850 | mImage.close();
1851 | closeOutput(output);
1852 | }
1853 | break;
1854 | }
1855 | case ImageFormat.RAW_SENSOR: {
1856 | DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
1857 | FileOutputStream output = null;
1858 | try {
1859 | output = new FileOutputStream(mFile);
1860 | dngCreator.writeImage(output, mImage);
1861 | success = true;
1862 | } catch (IOException e) {
1863 | e.printStackTrace();
1864 | } finally {
1865 | mImage.close();
1866 | closeOutput(output);
1867 | }
1868 | break;
1869 | }
1870 | default: {
1871 | Log.e(TAG, "Cannot save image, unexpected image format:" + format);
1872 | break;
1873 | }
1874 | }
1875 |
1876 | // Decrement reference count to allow ImageReader to be closed to free up resources.
1877 | mReader.close();
1878 |
1879 | // If saving the file succeeded, update MediaStore.
1880 | if (success) {
1881 | MediaScannerConnection.scanFile(mContext, new String[]{mFile.getPath()},
1882 | /*mimeTypes*/null, new MediaScannerConnection.MediaScannerConnectionClient() {
1883 | @Override
1884 | public void onMediaScannerConnected() {
1885 | // Do nothing
1886 | }
1887 |
1888 | @Override
1889 | public void onScanCompleted(String path, Uri uri) {
1890 | Log.i(TAG, "Scanned " + path + ":");
1891 | Log.i(TAG, "-> uri=" + uri);
1892 | }
1893 | });
1894 | }
1895 | }
1896 |
1897 | /**
1898 | * Builder class for constructing {@link ImageSaver}s.
1899 | *
1900 | * This class is thread safe.
1901 | */
1902 | public static class ImageSaverBuilder {
1903 | private Image mImage;
1904 | private File mFile;
1905 | private CaptureResult mCaptureResult;
1906 | private CameraCharacteristics mCharacteristics;
1907 | private Context mContext;
1908 |             private RefCountedAutoCloseable<ImageReader> mReader;
1909 |
1910 | /**
1911 | * Construct a new ImageSaverBuilder using the given {@link Context}.
1912 | *
1913 |              * @param context a {@link Context} to use for accessing the
1914 | * {@link android.provider.MediaStore}.
1915 | */
1916 | public ImageSaverBuilder(final Context context) {
1917 | mContext = context;
1918 | }
1919 |
1920 | public synchronized ImageSaverBuilder setRefCountedReader(
1921 |                     RefCountedAutoCloseable<ImageReader> reader) {
1922 | if (reader == null) throw new NullPointerException();
1923 |
1924 | mReader = reader;
1925 | return this;
1926 | }
1927 |
1928 | public synchronized ImageSaverBuilder setImage(final Image image) {
1929 | if (image == null) throw new NullPointerException();
1930 | mImage = image;
1931 | return this;
1932 | }
1933 |
1934 | public synchronized ImageSaverBuilder setFile(final File file) {
1935 | if (file == null) throw new NullPointerException();
1936 | mFile = file;
1937 | return this;
1938 | }
1939 |
1940 | public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
1941 | if (result == null) throw new NullPointerException();
1942 | mCaptureResult = result;
1943 | return this;
1944 | }
1945 |
1946 | public synchronized ImageSaverBuilder setCharacteristics(
1947 | final CameraCharacteristics characteristics) {
1948 | if (characteristics == null) throw new NullPointerException();
1949 | mCharacteristics = characteristics;
1950 | return this;
1951 | }
1952 |
1953 | public synchronized ImageSaver buildIfComplete() {
1954 | if (!isComplete()) {
1955 | return null;
1956 | }
1957 | return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext,
1958 | mReader);
1959 | }
1960 |
1961 | public synchronized String getSaveLocation() {
1962 | return (mFile == null) ? "Unknown" : mFile.toString();
1963 | }
1964 |
1965 | private boolean isComplete() {
1966 | return mImage != null && mFile != null && mCaptureResult != null
1967 | && mCharacteristics != null;
1968 | }
1969 | }
1970 | }
1971 |
1972 | /**
1973 | * Comparator based on area of the given {@link Size} objects.
1974 | */
1975 |     static class CompareSizesByArea implements Comparator<Size> {
1976 |
1977 | @Override
1978 | public int compare(Size lhs, Size rhs) {
1979 | // We cast here to ensure the multiplications won't overflow
1980 | return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
1981 | (long) rhs.getWidth() * rhs.getHeight());
1982 | }
1983 |
1984 | }
1985 |
1986 | /**
1987 |      * A dialog fragment for displaying non-recoverable errors; this {@link Activity} will be
1988 | * finished once the dialog has been acknowledged by the user.
1989 | */
1990 | public static class ErrorDialog extends DialogFragment {
1991 |
1992 | private String mErrorMessage;
1993 |
1994 | public ErrorDialog() {
1995 | mErrorMessage = "Unknown error occurred!";
1996 | }
1997 |
1998 | // Build a dialog with a custom message (Fragments require default constructor).
1999 | public static ErrorDialog buildErrorDialog(String errorMessage) {
2000 | ErrorDialog dialog = new ErrorDialog();
2001 | dialog.mErrorMessage = errorMessage;
2002 | return dialog;
2003 | }
2004 |
2005 | @Override
2006 | public Dialog onCreateDialog(Bundle savedInstanceState) {
2007 | final Activity activity = getActivity();
2008 | return new AlertDialog.Builder(activity)
2009 | .setMessage(mErrorMessage)
2010 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
2011 | @Override
2012 | public void onClick(DialogInterface dialogInterface, int i) {
2013 | activity.finish();
2014 | }
2015 | })
2016 | .create();
2017 | }
2018 | }
2019 |
2020 | /**
2021 | * A wrapper for an {@link AutoCloseable} object that implements reference counting to allow
2022 | * for resource management.
2023 | */
2024 |     public static class RefCountedAutoCloseable<T extends AutoCloseable> implements AutoCloseable {
2025 | private T mObject;
2026 | private long mRefCount = 0;
2027 |
2028 | /**
2029 | * Wrap the given object.
2030 | *
2031 | * @param object an object to wrap.
2032 | */
2033 | public RefCountedAutoCloseable(T object) {
2034 | if (object == null) throw new NullPointerException();
2035 | mObject = object;
2036 | }
2037 |
2038 | /**
2039 | * Increment the reference count and return the wrapped object.
2040 | *
2041 | * @return the wrapped object, or null if the object has been released.
2042 | */
2043 | public synchronized T getAndRetain() {
2044 | if (mRefCount < 0) {
2045 | return null;
2046 | }
2047 | mRefCount++;
2048 | return mObject;
2049 | }
2050 |
2051 | /**
2052 | * Return the wrapped object.
2053 | *
2054 | * @return the wrapped object, or null if the object has been released.
2055 | */
2056 | public synchronized T get() {
2057 | return mObject;
2058 | }
2059 |
2060 | /**
2061 | * Decrement the reference count and release the wrapped object if there are no other
2062 | * users retaining this object.
2063 | */
2064 | @Override
2065 | public synchronized void close() {
2066 | if (mRefCount >= 0) {
2067 | mRefCount--;
2068 | if (mRefCount < 0) {
2069 | try {
2070 | mObject.close();
2071 | } catch (Exception e) {
2072 | throw new RuntimeException(e);
2073 | } finally {
2074 | mObject = null;
2075 | }
2076 | }
2077 | }
2078 | }
2079 | }
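// Usage sketch (illustrative): the owner that constructs the wrapper holds the initial
// reference; every background task calls getAndRetain() before touching the wrapped object and
// close() when finished. The wrapped object is only really closed once the count drops below
// zero, i.e. after the owner and all in-flight users have released it.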
2080 |
2081 | /**
2082 | * A dialog that explains about the necessary permissions.
2083 | */
2084 | public static class PermissionConfirmationDialog extends DialogFragment {
2085 |
2086 | public static PermissionConfirmationDialog newInstance() {
2087 | return new PermissionConfirmationDialog();
2088 | }
2089 |
2090 | @Override
2091 | public Dialog onCreateDialog(Bundle savedInstanceState) {
2092 | final Fragment parent = getParentFragment();
2093 | return new AlertDialog.Builder(getActivity())
2094 | .setMessage(R.string.request_permission)
2095 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
2096 | @Override
2097 | public void onClick(DialogInterface dialog, int which) {
2098 | FragmentCompat.requestPermissions(parent, CAMERA_PERMISSIONS,
2099 | REQUEST_CAMERA_PERMISSIONS);
2100 | }
2101 | })
2102 | .setNegativeButton(android.R.string.cancel,
2103 | new DialogInterface.OnClickListener() {
2104 | @Override
2105 | public void onClick(DialogInterface dialog, int which) {
2106 | getActivity().finish();
2107 | }
2108 | })
2109 | .create();
2110 | }
2111 |
2112 | }
2113 |
2114 | private static class Camera2Handler extends Handler {
2115 |         private WeakReference<Camera2Fragment> fragment;
2116 |
2117 | public Camera2Handler(Camera2Fragment fragment) {
2118 | this.fragment = new WeakReference<>(fragment);
2119 | }
2120 |
2121 | @Override
2122 | public void handleMessage(Message msg) {
2123 | switch (msg.what) {
2124 | case IMAGE_SHOW:
2125 |                     fragment.get().showImage((Bitmap) msg.obj);
2126 | break;
2127 | case FOCUS_HIDE:
2128 | fragment.get().mIvFocus.setVisibility(View.INVISIBLE);
2129 | break;
2130 | default:
2131 | break;
2132 | }
2133 | }
2134 | }
2135 | }
2136 |
2137 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/widget/AutoFitTextureView.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.widget;
2 |
3 | import android.content.Context;
4 | import android.util.AttributeSet;
5 | import android.view.TextureView;
6 |
7 | /**
8 | * A {@link TextureView} that can be adjusted to a specified aspect ratio.
9 | */
10 | public class AutoFitTextureView extends TextureView {
11 |
12 | private int mRatioWidth = 0;
13 | private int mRatioHeight = 0;
14 |
15 | public AutoFitTextureView(Context context) {
16 | this(context, null);
17 | }
18 |
19 | public AutoFitTextureView(Context context, AttributeSet attrs) {
20 | this(context, attrs, 0);
21 | }
22 |
23 | public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
24 | super(context, attrs, defStyle);
25 | }
26 |
27 | /**
28 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
29 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
30 |      * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) produce the same result.
31 | *
32 | * @param width Relative horizontal size
33 | * @param height Relative vertical size
34 | */
35 | public void setAspectRatio(int width, int height) {
36 | if (width < 0 || height < 0) {
37 | throw new IllegalArgumentException("Size cannot be negative.");
38 | }
39 | if (mRatioWidth == width && mRatioHeight == height) {
40 | return;
41 | }
42 | mRatioWidth = width;
43 | mRatioHeight = height;
44 | requestLayout();
45 | }
46 |
47 | @Override
48 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
49 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
50 | int width = MeasureSpec.getSize(widthMeasureSpec);
51 | int height = MeasureSpec.getSize(heightMeasureSpec);
52 | if (0 == mRatioWidth || 0 == mRatioHeight) {
53 | setMeasuredDimension(width, height);
54 | } else {
55 | if (width < height * mRatioWidth / mRatioHeight) {
56 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
57 | } else {
58 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
59 | }
60 | }
61 | }
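// Example: after setAspectRatio(4, 3), a 1080x1920 measure spec takes the first branch
// (1080 < 1920 * 4 / 3 = 2560), so the view measures 1080 x (1080 * 3 / 4) = 1080x810.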
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/361camera/src/main/java/org/hunter/a361camera/widget/PorterDuffImageView.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera.widget;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.Canvas;
6 | import android.graphics.Paint;
7 | import android.graphics.PorterDuff;
8 | import android.graphics.PorterDuffXfermode;
9 | import android.graphics.RectF;
10 | import android.graphics.Xfermode;
11 | import android.graphics.drawable.BitmapDrawable;
12 | import android.util.AttributeSet;
13 | import android.util.Log;
14 | import android.widget.ImageView;
15 |
16 | public class PorterDuffImageView extends ImageView {
17 | private static Xfermode xfermode;
18 | private Paint mpaint;
19 | private Bitmap bitmap;
20 | private RectF rect;
21 |
22 | public PorterDuffImageView(Context context) {
23 | super(context);
24 | init();
25 | }
26 |
27 | public PorterDuffImageView(Context context, AttributeSet attrs) {
28 | super(context, attrs);
29 | init();
30 | }
31 |
32 | public PorterDuffImageView(Context context, AttributeSet attrs, int defStyleAttr) {
33 | super(context, attrs, defStyleAttr);
34 | init();
35 | }
36 |
37 | private void init() {
38 | setLayerType(LAYER_TYPE_SOFTWARE, null);
39 | xfermode = new PorterDuffXfermode(PorterDuff.Mode.DST_IN);
40 | mpaint = new Paint(Paint.ANTI_ALIAS_FLAG);
41 | }
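// PorterDuff.Mode.DST_IN keeps the destination (the drawable already drawn to the canvas) only
// where the source (the oval mask bitmap drawn in onDraw) is opaque, so the image is clipped to
// an oval. The software layer above is presumably set because some Xfermodes do not behave
// consistently on hardware-accelerated canvases.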
42 |
43 | @Override
44 | protected void onDraw(Canvas canvas) {
45 | super.onDraw(canvas);
46 | int width = getWidth();
47 | int height = getHeight();
48 | Bitmap.Config config = Bitmap.Config.ARGB_8888;
49 | Canvas canvas1 = null;
50 | BitmapDrawable drawable = (BitmapDrawable) getDrawable();
51 | // If image is not set, return
52 | if (drawable == null) {
53 | return;
54 | }
55 | drawable.setBounds(0, 0, width, height);
56 | drawable.draw(canvas);
57 | if (bitmap == null) {
58 | Log.i("porterduffviewimage", "bitmap==null");
59 | Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
60 | rect = new RectF(0, 0, width, height);
61 | bitmap = Bitmap.createBitmap(width, height, config);
62 | canvas1 = new Canvas(bitmap);
63 | canvas1.drawOval(rect, paint);
64 | }
65 | mpaint.setXfermode(xfermode);
66 | canvas.drawBitmap(bitmap, 0, 0, mpaint);
67 | }
68 |
69 | }
70 |
--------------------------------------------------------------------------------
/361camera/src/main/res/anim/scale.xml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/361camera/src/main/res/drawable/focus_area.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
--------------------------------------------------------------------------------
/361camera/src/main/res/layout-land/fragment_camera2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
6 |
7 |
11 |
12 |
16 |
17 |
25 |
26 |
32 |
33 |
34 |
35 |
46 |
47 |
55 |
56 |
63 |
64 |
71 |
72 |
73 |
74 |
82 |
83 |
86 |
87 |
94 |
95 |
102 |
103 |
110 |
111 |
112 |
113 |
--------------------------------------------------------------------------------
/361camera/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
13 |
14 |
--------------------------------------------------------------------------------
/361camera/src/main/res/layout/fragment_camera2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
6 |
7 |
11 |
12 |
16 |
17 |
25 |
26 |
32 |
33 |
34 |
39 |
40 |
44 |
45 |
52 |
53 |
60 |
61 |
68 |
69 |
70 |
71 |
78 |
79 |
87 |
88 |
95 |
96 |
103 |
104 |
105 |
106 |
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/capture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/capture.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/flash_auto.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/flash_auto.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/flash_off.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/flash_off.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/flash_on.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/flash_on.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/hdr_off.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/hdr_off.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/hdr_on.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/hdr_on.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/ic_10s.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/ic_10s.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/ic_3s.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/ic_3s.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/switch_camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/switch_camera.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxhdpi/timer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxhdpi/timer.png
--------------------------------------------------------------------------------
/361camera/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/361camera/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/361camera/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Example customization of dimensions originally defined in res/values/dimens.xml
3 |          (such as screen margins) for screens with more than 820dp of available width. This
4 |          would include 7" and 10" devices in landscape (~960dp and ~1280dp respectively). -->
5 |     <dimen name="activity_horizontal_margin">64dp</dimen>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/361camera/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="colorPrimary">#3F51B5</color>
4 |     <color name="colorPrimaryDark">#303F9F</color>
5 |     <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/361camera/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Default screen margins, per the Android Design guidelines. -->
3 |     <dimen name="activity_horizontal_margin">16dp</dimen>
4 |     <dimen name="activity_vertical_margin">16dp</dimen>
5 | </resources>
6 |
--------------------------------------------------------------------------------
/361camera/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | 361Camera
3 | Camera access permission is required
4 |
5 |
--------------------------------------------------------------------------------
/361camera/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/361camera/src/test/java/org/hunter/a361camera/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package org.hunter.a361camera;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.assertEquals;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 361Camera
2 | An Android camera application.
3 | 1. Supports capturing both JPEG and RAW output.
4 | 2. Supports switching between the front and back cameras.
5 | 3. Supports auto/off/forced flash.
6 | 4. Supports delayed (timer) capture.
7 | 5. Supports touch to focus (see the illustrative sketch below).
8 | 6. Supports HDR mode.
9 |
--------------------------------------------------------------------------------
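Regarding the "touch to focus" feature listed in the README above: the usual Camera2 approach is to map a tap on the preview into a metering region in sensor active-array coordinates and attach it to the capture request. The sketch below only illustrates that mapping under simplifying assumptions; the class and method names are hypothetical, and sensor rotation and crop-region handling, which real focus code must account for, are ignored.

import android.graphics.Rect;
import android.hardware.camera2.params.MeteringRectangle;

/** Illustrative helper: maps a preview tap to a Camera2 AF metering region. */
public final class TouchFocusSketch {

    /**
     * Converts a tap at (tapX, tapY) on a preview of size viewWidth x viewHeight
     * into a MeteringRectangle in sensor active-array coordinates.
     */
    public static MeteringRectangle regionForTap(float tapX, float tapY,
                                                 int viewWidth, int viewHeight,
                                                 Rect activeArray) {
        // Normalize the tap position to [0, 1] within the preview.
        float nx = clamp(tapX / viewWidth, 0f, 1f);
        float ny = clamp(tapY / viewHeight, 0f, 1f);

        // Use a square region roughly 10% of the active array as the focus area.
        int half = Math.max(activeArray.width(), activeArray.height()) / 20;
        int centerX = activeArray.left + Math.round(nx * activeArray.width());
        int centerY = activeArray.top + Math.round(ny * activeArray.height());

        // Clamp the region so it stays inside the active array.
        int left = clamp(centerX - half, activeArray.left, activeArray.right - 2 * half);
        int top = clamp(centerY - half, activeArray.top, activeArray.bottom - 2 * half);

        return new MeteringRectangle(left, top, 2 * half, 2 * half,
                MeteringRectangle.METERING_WEIGHT_MAX);
    }

    private static float clamp(float v, float lo, float hi) {
        return Math.max(lo, Math.min(hi, v));
    }

    private static int clamp(int v, int lo, int hi) {
        return Math.max(lo, Math.min(hi, v));
    }
}

The returned region would then typically be set as CaptureRequest.CONTROL_AF_REGIONS (and often CONTROL_AE_REGIONS as well), followed by a CONTROL_AF_TRIGGER_START to begin the autofocus scan.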
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | jcenter()
6 | }
7 | dependencies {
8 | classpath 'com.android.tools.build:gradle:2.2.2'
9 |
10 | // NOTE: Do not place your application dependencies here; they belong
11 | // in the individual module build.gradle files
12 | classpath 'com.neenbedankt.gradle.plugins:android-apt:1.8'
13 | }
14 | }
15 |
16 | allprojects {
17 | repositories {
18 | jcenter()
19 | }
20 | }
21 |
22 | task clean(type: Delete) {
23 | delete rootProject.buildDir
24 | }
25 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengqifu/361Camera/ea1eb1df9547b96ecbfe27b6f02bdddf4874055b/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Dec 28 10:00:20 PST 2015
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':361camera'
2 |
--------------------------------------------------------------------------------