├── .gitignore
├── .idea
│   ├── .gitignore
│   ├── .name
│   ├── compiler.xml
│   ├── gradle.xml
│   ├── jarRepositories.xml
│   ├── misc.xml
│   └── vcs.xml
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── io
│       │           └── intelligible
│       │               └── arcoremlkit
│       │                   └── ExampleInstrumentedTest.kt
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── assets
│       │   │   ├── model.tflite
│       │   │   └── shaders
│       │   │       ├── background_show_camera.frag
│       │   │       ├── background_show_camera.vert
│       │   │       ├── label.frag
│       │   │       ├── label.vert
│       │   │       ├── point_cloud.frag
│       │   │       └── point_cloud.vert
│       │   ├── java
│       │   │   └── io
│       │   │       └── intelligible
│       │   │           └── arcoremlkit
│       │   │               ├── YuvToRgbConverter.kt
│       │   │               ├── common
│       │   │               │   ├── helpers
│       │   │               │   │   ├── DisplayRotationHelper.kt
│       │   │               │   │   ├── FullScreenHelper.kt
│       │   │               │   │   ├── SnackbarHelper.kt
│       │   │               │   │   └── TrackingStateHelper.kt
│       │   │               │   └── samplerender
│       │   │               │       ├── Framebuffer.kt
│       │   │               │       ├── GLError.kt
│       │   │               │       ├── GpuBuffer.kt
│       │   │               │       ├── IndexBuffer.kt
│       │   │               │       ├── Mesh.kt
│       │   │               │       ├── SampleRender.kt
│       │   │               │       ├── Shader.kt
│       │   │               │       ├── Texture.kt
│       │   │               │       ├── VertexBuffer.kt
│       │   │               │       └── arcore
│       │   │               │           └── BackgroundRenderer.kt
│       │   │               ├── ml
│       │   │               │   ├── ARCoreSessionLifecycle.kt
│       │   │               │   ├── AppRenderer.kt
│       │   │               │   ├── MainActivity.kt
│       │   │               │   ├── MainActivityView.kt
│       │   │               │   ├── classification
│       │   │               │   │   ├── DetectedObjectResult.kt
│       │   │               │   │   ├── GoogleCloudVisionDetector.kt
│       │   │               │   │   ├── MLKitObjectDetector.kt
│       │   │               │   │   ├── ObjectDetector.kt
│       │   │               │   │   └── utils
│       │   │               │   │       ├── ImageUtils.kt
│       │   │               │   │       └── VertexUtils.kt
│       │   │               │   └── render
│       │   │               │       ├── LabelRender.kt
│       │   │               │       ├── PointCloudRender.kt
│       │   │               │       └── TextTextureCache.kt
│       │   │               └── utils
│       │   │                   └── CameraUtils.kt
│       │   └── res
│       │       ├── drawable-v24
│       │       │   └── ic_launcher_foreground.xml
│       │       ├── drawable
│       │       │   └── ic_launcher_background.xml
│       │       ├── layout
│       │       │   └── activity_main.xml
│       │       ├── mipmap-anydpi-v26
│       │       │   ├── ic_launcher.xml
│       │       │   └── ic_launcher_round.xml
│       │       ├── mipmap-hdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-mdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── values-night
│       │       │   └── themes.xml
│       │       └── values
│       │           ├── colors.xml
│       │           ├── strings.xml
│       │           └── themes.xml
│       └── test
│           └── java
│               └── io
│                   └── intelligible
│                       └── arcoremlkit
│                           └── ExampleUnitTest.kt
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/navEditor.xml
9 | /.idea/assetWizardSettings.xml
10 | .DS_Store
11 | /build
12 | /captures
13 | .externalNativeBuild
14 | .cxx
15 | local.properties
16 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/.idea/.name:
--------------------------------------------------------------------------------
1 | ARCore MLkit
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
21 |
22 |
--------------------------------------------------------------------------------
/.idea/jarRepositories.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # ARcoreMLKit Object Detection
3 | ARCore and ML Kit integration without Sceneform
4 | ARCore recognition of multiple objects with ML Kit, without using Sceneform. OpenGL is used for the AR rendering and for attaching anchors to the detected objects.
5 |
6 | https://user-images.githubusercontent.com/61690178/124721329-7503d680-df22-11eb-9967-84f7c5a5b2d0.mp4
7 |
8 |
--------------------------------------------------------------------------------
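The flow the README describes is implemented by the repo's ml package (AppRenderer, MLKitObjectDetector, and the classification/utils helpers), which is not included in this excerpt. As a rough, hypothetical Kotlin sketch of the idea only (detectAndAnchor, toViewX/toViewY and onAnchor are illustrative names, not the repo's API; real code must also map ML Kit's image coordinates to view coordinates and hand the asynchronous result back to the GL thread):

import android.graphics.Bitmap
import com.google.ar.core.Anchor
import com.google.ar.core.Frame
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.objects.ObjectDetection
import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions

// Single-image, multi-object detector (the repo also bundles a custom model in assets/model.tflite).
val detector = ObjectDetection.getClient(
    ObjectDetectorOptions.Builder()
        .setDetectorMode(ObjectDetectorOptions.SINGLE_IMAGE_MODE)
        .enableMultipleObjects()
        .enableClassification()
        .build()
)

// Called once per ARCore frame with the camera image already converted to a Bitmap.
fun detectAndAnchor(
    frame: Frame,
    cameraBitmap: Bitmap,
    rotationDegrees: Int,
    toViewX: (Float) -> Float,          // image-to-view coordinate mapping (placeholder)
    toViewY: (Float) -> Float,
    onAnchor: (Anchor, String) -> Unit  // e.g. queue the anchor + label for rendering
) {
    val input = InputImage.fromBitmap(cameraBitmap, rotationDegrees)
    detector.process(input).addOnSuccessListener { objects ->
        for (obj in objects) {
            val label = obj.labels.firstOrNull()?.text ?: "Object"
            val cx = toViewX(obj.boundingBox.exactCenterX())
            val cy = toViewY(obj.boundingBox.exactCenterY())
            // Hit-test the center of the detection and pin an ARCore anchor at the first hit.
            frame.hitTest(cx, cy).firstOrNull()?.let { onAnchor(it.createAnchor(), label) }
        }
    }
}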
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.application'
3 | id 'kotlin-android'
4 | }
5 |
6 | android {
7 | compileSdkVersion 30
8 | buildToolsVersion "30.0.3"
9 |
10 | defaultConfig {
11 | applicationId "io.intelligible.arcoremlkit"
12 | minSdkVersion 24
13 | targetSdkVersion 30
14 | versionCode 1
15 | versionName "1.0"
16 |
17 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
18 | }
19 | packagingOptions {
20 | exclude 'META-INF/DEPENDENCIES'
21 | exclude 'META-INF/LICENSE'
22 | exclude 'META-INF/LICENSE.txt'
23 | exclude 'META-INF/license.txt'
24 | exclude 'META-INF/NOTICE'
25 | exclude 'META-INF/NOTICE.txt'
26 | exclude 'META-INF/notice.txt'
27 | exclude 'META-INF/ASL2.0'
28 | exclude 'META-INF/INDEX.LIST'
29 | exclude("META-INF/*.kotlin_module")
30 | }
31 |
32 | buildTypes {
33 | release {
34 | minifyEnabled false
35 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
36 | }
37 | }
38 | compileOptions {
39 | sourceCompatibility JavaVersion.VERSION_1_8
40 | targetCompatibility JavaVersion.VERSION_1_8
41 | }
42 | kotlinOptions {
43 | jvmTarget = '1.8'
44 | }
45 | }
46 |
47 | dependencies {
48 |
49 | implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
50 | implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
51 | implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.4.2'
52 | implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-play-services:1.4.2'
53 |
54 | implementation 'androidx.appcompat:appcompat:1.2.0'
55 | implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0'
56 | implementation 'androidx.lifecycle:lifecycle-common-java8:2.3.1'
57 |
58 | implementation 'com.google.android.material:material:1.3.0'
59 |
60 | // Google Cloud Vision APIs
61 | implementation platform('com.google.cloud:libraries-bom:19.2.1')
62 | implementation 'com.google.cloud:google-cloud-vision:1.102.0'
63 | implementation 'io.grpc:grpc-okhttp:1.36.0'
64 |
65 | // MLKit
66 | implementation 'com.google.mlkit:object-detection:16.2.6'
67 | implementation 'com.google.mlkit:object-detection-custom:16.3.3'
68 |
69 | // ARCore
70 | implementation 'com.google.ar:core:1.25.0'
71 |
72 | // Obj - a simple Wavefront OBJ file loader
73 | // https://github.com/javagl/Obj
74 | implementation 'de.javagl:obj:0.2.1'
75 | }
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/app/src/androidTest/java/io/intelligible/arcoremlkit/ExampleInstrumentedTest.kt:
--------------------------------------------------------------------------------
1 | package io.intelligible.arcoremlkit
2 |
3 | import androidx.test.platform.app.InstrumentationRegistry
4 | import androidx.test.ext.junit.runners.AndroidJUnit4
5 |
6 | import org.junit.Test
7 | import org.junit.runner.RunWith
8 |
9 | import org.junit.Assert.*
10 |
11 | /**
12 | * Instrumented test, which will execute on an Android device.
13 | *
14 | * See [testing documentation](http://d.android.com/tools/testing).
15 | */
16 | @RunWith(AndroidJUnit4::class)
17 | class ExampleInstrumentedTest {
18 | @Test
19 | fun useAppContext() {
20 | // Context of the app under test.
21 | val appContext = InstrumentationRegistry.getInstrumentation().targetContext
22 | assertEquals("io.intelligible.arcoremlkit", appContext.packageName)
23 | }
24 | }
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
13 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/app/src/main/assets/model.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/assets/model.tflite
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/background_show_camera.frag:
--------------------------------------------------------------------------------
1 | #version 300 es
2 | /*
3 | * Copyright 2017 Google LLC
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | #extension GL_OES_EGL_image_external_essl3 : require
18 | precision mediump float;
19 |
20 | uniform samplerExternalOES u_CameraColorTexture;
21 |
22 | in vec2 v_CameraTexCoord;
23 |
24 | layout(location = 0) out vec4 o_FragColor;
25 |
26 | void main() { o_FragColor = texture(u_CameraColorTexture, v_CameraTexCoord); }
27 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/background_show_camera.vert:
--------------------------------------------------------------------------------
1 | #version 300 es
2 | /*
3 | * Copyright 2017 Google LLC
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | layout(location = 0) in vec4 a_Position;
19 | layout(location = 1) in vec2 a_CameraTexCoord;
20 | // The virtual scene texture coordinate is unused in the background shader, but
21 | // is defined in the BackgroundRenderer Mesh.
22 | layout(location = 2) in vec2 a_VirtualSceneTexCoord;
23 |
24 | out vec2 v_CameraTexCoord;
25 |
26 | void main() {
27 | gl_Position = a_Position;
28 | v_CameraTexCoord = a_CameraTexCoord;
29 | }
30 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/label.frag:
--------------------------------------------------------------------------------
1 | #version 300 es
2 | /*
3 | * Copyright 2021 Google LLC
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | precision mediump float;
18 |
19 | uniform sampler2D uTexture;
20 | in vec2 vTexPos;
21 |
22 | layout(location = 0) out vec4 o_FragColor;
23 |
24 | void main(void) {
25 | o_FragColor = texture(uTexture, vec2(vTexPos.x, 1.0 - vTexPos.y));
26 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/label.vert:
--------------------------------------------------------------------------------
1 | #version 300 es
2 | /*
3 | * Copyright 2021 Google LLC
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | layout(location = 0) in vec2 aPosition;
19 | layout(location = 1) in vec2 aTexPos;
20 |
21 | out vec2 vTexPos;
22 |
23 | uniform mat4 u_ViewProjection;
24 | uniform vec3 u_CameraPos;
25 | uniform vec3 u_LabelOrigin;
26 |
27 | void main() {
28 | vTexPos = aTexPos;
29 | vec3 labelNormal = normalize(u_CameraPos - u_LabelOrigin);
30 | vec3 labelSide = -cross(labelNormal, vec3(0.0, 1.0, 0.0));
31 | vec3 modelPosition = u_LabelOrigin + aPosition.x*0.1 * labelSide + aPosition.y * vec3(0.0, 1.0, 0.0)*0.1;
32 | gl_Position = u_ViewProjection * vec4(modelPosition, 1.0);
33 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/point_cloud.frag:
--------------------------------------------------------------------------------
1 | #version 300 es
2 | /*
3 | * Copyright 2017 Google LLC
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 | precision mediump float;
18 |
19 | uniform vec4 u_Color;
20 |
21 | out vec4 o_FragColor;
22 |
23 | void main() {
24 | o_FragColor = u_Color;
25 | }
26 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/point_cloud.vert:
--------------------------------------------------------------------------------
1 | #version 300 es
2 | /*
3 | * Copyright 2017 Google LLC
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | uniform mat4 u_ModelViewProjection;
19 | uniform float u_PointSize;
20 |
21 | layout(location = 0) in vec4 a_Position;
22 |
23 | void main() {
24 | gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);
25 | gl_PointSize = u_PointSize;
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/YuvToRgbConverter.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit
3 |
4 | import android.content.Context
5 | import android.graphics.Bitmap
6 | import android.graphics.ImageFormat
7 | import android.graphics.Rect
8 | import android.media.Image
9 | import android.renderscript.Allocation
10 | import android.renderscript.Element
11 | import android.renderscript.RenderScript
12 | import android.renderscript.ScriptIntrinsicYuvToRGB
13 | import android.renderscript.Type
14 |
15 | /**
16 | * Helper class used to efficiently convert a [Media.Image] object from
17 | * [ImageFormat.YUV_420_888] format to an RGB [Bitmap] object.
18 | *
19 | * The [yuvToRgb] method is able to achieve the same FPS as the CameraX image
20 | * analysis use case on a Pixel 3 XL device at the default analyzer resolution,
21 | * which is 30 FPS with 640x480.
22 | *
23 | * NOTE: This has been tested in a limited number of devices and is not
24 | * considered production-ready code. It was created for illustration purposes,
25 | * since this is not an efficient camera pipeline due to the multiple copies
26 | * required to convert each frame.
27 | */
28 | class YuvToRgbConverter(context: Context) {
29 | private val rs = RenderScript.create(context)
30 | private val scriptYuvToRgb = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs))
31 |
32 | private var pixelCount: Int = -1
33 | private lateinit var yuvBuffer: ByteArray
34 | private lateinit var inputAllocation: Allocation
35 | private lateinit var outputAllocation: Allocation
36 |
37 | @Synchronized
38 | fun yuvToRgb(image: Image, output: Bitmap) {
39 |
40 | // Ensure that the intermediate output byte buffer is allocated
41 | if (!::yuvBuffer.isInitialized) {
42 | pixelCount = image.width * image.height
43 | // Bits per pixel is an average for the whole image, so it's useful to compute the size
44 | // of the full buffer but should not be used to determine pixel offsets
45 | val pixelSizeBits = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888)
46 | yuvBuffer = ByteArray(pixelCount * pixelSizeBits / 8)
47 | }
48 |
49 | // Get the YUV data in byte array form using NV21 format
50 | imageToByteArray(image, yuvBuffer)
51 |
52 | // Ensure that the RenderScript inputs and outputs are allocated
53 | if (!::inputAllocation.isInitialized) {
54 | // Explicitly create an element with type NV21, since that's the pixel format we use
55 | val elemType = Type.Builder(rs, Element.YUV(rs)).setYuvFormat(ImageFormat.NV21).create()
56 | inputAllocation = Allocation.createSized(rs, elemType.element, yuvBuffer.size)
57 | }
58 | if (!::outputAllocation.isInitialized) {
59 | outputAllocation = Allocation.createFromBitmap(rs, output)
60 | }
61 |
62 | // Convert NV21 format YUV to RGB
63 | inputAllocation.copyFrom(yuvBuffer)
64 | scriptYuvToRgb.setInput(inputAllocation)
65 | scriptYuvToRgb.forEach(outputAllocation)
66 | outputAllocation.copyTo(output)
67 | }
68 |
69 | private fun imageToByteArray(image: Image, outputBuffer: ByteArray) {
70 | assert(image.format == ImageFormat.YUV_420_888)
71 |
72 | val imageCrop = Rect(0, 0, image.width, image.height)
73 | val imagePlanes = image.planes
74 |
75 | imagePlanes.forEachIndexed { planeIndex, plane ->
76 | // How many values are read in input for each output value written
77 | // Only the Y plane has a value for every pixel, U and V have half the resolution i.e.
78 | //
79 | // Y Plane U Plane V Plane
80 | // =============== ======= =======
81 | // Y Y Y Y Y Y Y Y U U U U V V V V
82 | // Y Y Y Y Y Y Y Y U U U U V V V V
83 | // Y Y Y Y Y Y Y Y U U U U V V V V
84 | // Y Y Y Y Y Y Y Y U U U U V V V V
85 | // Y Y Y Y Y Y Y Y
86 | // Y Y Y Y Y Y Y Y
87 | // Y Y Y Y Y Y Y Y
88 | val outputStride: Int
89 |
90 | // The index in the output buffer the next value will be written at
91 | // For Y it's zero, for U and V we start at the end of Y and interleave them i.e.
92 | //
93 | // First chunk Second chunk
94 | // =============== ===============
95 | // Y Y Y Y Y Y Y Y V U V U V U V U
96 | // Y Y Y Y Y Y Y Y V U V U V U V U
97 | // Y Y Y Y Y Y Y Y V U V U V U V U
98 | // Y Y Y Y Y Y Y Y V U V U V U V U
99 | // Y Y Y Y Y Y Y Y
100 | // Y Y Y Y Y Y Y Y
101 | // Y Y Y Y Y Y Y Y
102 | var outputOffset: Int
103 |
104 | when (planeIndex) {
105 | 0 -> {
106 | outputStride = 1
107 | outputOffset = 0
108 | }
109 | 1 -> {
110 | outputStride = 2
111 | // For NV21 format, U is in odd-numbered indices
112 | outputOffset = pixelCount + 1
113 | }
114 | 2 -> {
115 | outputStride = 2
116 | // For NV21 format, V is in even-numbered indices
117 | outputOffset = pixelCount
118 | }
119 | else -> {
120 | // Image contains more than 3 planes, something strange is going on
121 | return@forEachIndexed
122 | }
123 | }
124 |
125 | val planeBuffer = plane.buffer
126 | val rowStride = plane.rowStride
127 | val pixelStride = plane.pixelStride
128 |
129 | // We have to divide the width and height by two if it's not the Y plane
130 | val planeCrop = if (planeIndex == 0) {
131 | imageCrop
132 | } else {
133 | Rect(
134 | imageCrop.left / 2,
135 | imageCrop.top / 2,
136 | imageCrop.right / 2,
137 | imageCrop.bottom / 2
138 | )
139 | }
140 |
141 | val planeWidth = planeCrop.width()
142 | val planeHeight = planeCrop.height()
143 |
144 | // Intermediate buffer used to store the bytes of each row
145 | val rowBuffer = ByteArray(plane.rowStride)
146 |
147 | // Size of each row in bytes
148 | val rowLength = if (pixelStride == 1 && outputStride == 1) {
149 | planeWidth
150 | } else {
151 | // Take into account that the stride may include data from pixels other than this
152 | // particular plane and row, and that could be between pixels and not after every
153 | // pixel:
154 | //
155 | // |---- Pixel stride ----| Row ends here --> |
156 | // | Pixel 1 | Other Data | Pixel 2 | Other Data | ... | Pixel N |
157 | //
158 | // We need to get (N-1) * (pixel stride bytes) per row + 1 byte for the last pixel
159 | (planeWidth - 1) * pixelStride + 1
160 | }
161 |
162 | for (row in 0 until planeHeight) {
163 | // Move buffer position to the beginning of this row
164 | planeBuffer.position(
165 | (row + planeCrop.top) * rowStride + planeCrop.left * pixelStride
166 | )
167 |
168 | if (pixelStride == 1 && outputStride == 1) {
169 | // When there is a single stride value for pixel and output, we can just copy
170 | // the entire row in a single step
171 | planeBuffer.get(outputBuffer, outputOffset, rowLength)
172 | outputOffset += rowLength
173 | } else {
174 | // When either pixel or output have a stride > 1 we must copy pixel by pixel
175 | planeBuffer.get(rowBuffer, 0, rowLength)
176 | for (col in 0 until planeWidth) {
177 | outputBuffer[outputOffset] = rowBuffer[col * pixelStride]
178 | outputOffset += outputStride
179 | }
180 | }
181 | }
182 | }
183 | }
184 | }
--------------------------------------------------------------------------------
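A minimal usage sketch for the converter above, assuming a call site that has acquired an ARCore camera Image in YUV_420_888 format (the FrameToBitmap wrapper is illustrative, not part of the repo):

import android.content.Context
import android.graphics.Bitmap
import android.media.Image
import io.intelligible.arcoremlkit.YuvToRgbConverter

// Reuse a single converter: it caches its intermediate byte buffer and RenderScript allocations.
class FrameToBitmap(context: Context) {
    private val converter = YuvToRgbConverter(context)

    fun convert(image: Image): Bitmap {
        // The output bitmap must match the camera image dimensions.
        val bitmap = Bitmap.createBitmap(image.width, image.height, Bitmap.Config.ARGB_8888)
        converter.yuvToRgb(image, bitmap)
        return bitmap
    }
}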
/app/src/main/java/io/intelligible/arcoremlkit/common/helpers/DisplayRotationHelper.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.helpers
3 |
4 | import android.content.Context
5 | import android.hardware.camera2.CameraAccessException
6 | import android.hardware.camera2.CameraCharacteristics
7 | import android.hardware.camera2.CameraManager
8 | import android.hardware.display.DisplayManager
9 | import android.hardware.display.DisplayManager.DisplayListener
10 | import android.view.Display
11 | import android.view.Surface
12 | import android.view.WindowManager
13 | import com.google.ar.core.Session
14 |
15 | /**
16 | * Helper to track the display rotations. In particular, the 180 degree rotations are not notified
17 | * by the onSurfaceChanged() callback, and thus they require listening to the android display
18 | * events.
19 | */
20 | class DisplayRotationHelper(context: Context) : DisplayListener {
21 | private var viewportChanged = false
22 | private var viewportWidth = 0
23 | private var viewportHeight = 0
24 | private val display: Display
25 | private val displayManager: DisplayManager
26 | private val cameraManager: CameraManager
27 |
28 | /** Registers the display listener. Should be called from [Activity.onResume]. */
29 | fun onResume() {
30 | displayManager.registerDisplayListener(this, null)
31 | }
32 |
33 | /** Unregisters the display listener. Should be called from [Activity.onPause]. */
34 | fun onPause() {
35 | displayManager.unregisterDisplayListener(this)
36 | }
37 |
38 | /**
39 | * Records a change in surface dimensions. This will later be used by [updateSessionIfNeeded]. Should be called from [android.opengl.GLSurfaceView.Renderer.onSurfaceChanged].
40 | *
41 | * @param width the updated width of the surface.
42 | * @param height the updated height of the surface.
43 | */
44 | fun onSurfaceChanged(width: Int, height: Int) {
45 | viewportWidth = width
46 | viewportHeight = height
47 | viewportChanged = true
48 | }
49 |
50 | /**
51 | * Updates the session display geometry if a change was posted either by the [onSurfaceChanged] call or by the [onDisplayChanged] system callback. This
52 | * function should be called explicitly before each call to [Session.update]. This
53 | * function will also clear the 'pending update' (viewportChanged) flag.
54 | *
55 | * @param session the [Session] object to update if display geometry changed.
56 | */
57 | fun updateSessionIfNeeded(session: Session) {
58 | if (viewportChanged) {
59 | val displayRotation = display.rotation
60 | session.setDisplayGeometry(displayRotation, viewportWidth, viewportHeight)
61 | viewportChanged = false
62 | }
63 | }
64 |
65 | /**
66 | * Returns the aspect ratio of the GL surface viewport while accounting for the display rotation
67 | * relative to the device camera sensor orientation.
68 | */
69 | fun getCameraSensorRelativeViewportAspectRatio(cameraId: String?): Float {
70 | val aspectRatio: Float
71 | val cameraSensorToDisplayRotation = getCameraSensorToDisplayRotation(cameraId)
72 | aspectRatio = when (cameraSensorToDisplayRotation) {
73 | 90, 270 -> viewportHeight.toFloat() / viewportWidth.toFloat()
74 | 0, 180 -> viewportWidth.toFloat() / viewportHeight.toFloat()
75 | else -> throw RuntimeException("Unhandled rotation: $cameraSensorToDisplayRotation")
76 | }
77 | return aspectRatio
78 | }
79 |
80 | /**
81 | * Returns the rotation of the back-facing camera with respect to the display. The value is one of
82 | * 0, 90, 180, 270.
83 | */
84 | fun getCameraSensorToDisplayRotation(cameraId: String?): Int {
85 | val characteristics: CameraCharacteristics
86 | characteristics = try {
87 | cameraManager.getCameraCharacteristics(cameraId!!)
88 | } catch (e: CameraAccessException) {
89 | throw RuntimeException("Unable to determine display orientation", e)
90 | }
91 |
92 | // Camera sensor orientation.
93 | val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
94 |
95 | // Current display orientation.
96 | val displayOrientation = toDegrees(display.rotation)
97 |
98 | // Make sure we return 0, 90, 180, or 270 degrees.
99 | return (sensorOrientation - displayOrientation + 360) % 360
100 | }
101 |
102 | private fun toDegrees(rotation: Int): Int {
103 | return when (rotation) {
104 | Surface.ROTATION_0 -> 0
105 | Surface.ROTATION_90 -> 90
106 | Surface.ROTATION_180 -> 180
107 | Surface.ROTATION_270 -> 270
108 | else -> throw RuntimeException("Unknown rotation $rotation")
109 | }
110 | }
111 |
112 | override fun onDisplayAdded(displayId: Int) {}
113 | override fun onDisplayRemoved(displayId: Int) {}
114 | override fun onDisplayChanged(displayId: Int) {
115 | viewportChanged = true
116 | }
117 |
118 | /**
119 | * Constructs the DisplayRotationHelper but does not register the listener yet.
120 | *
121 | * @param context the Android [Context].
122 | */
123 | init {
124 | displayManager = context.getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
125 | cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
126 | val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
127 | display = windowManager.defaultDisplay
128 | }
129 | }
--------------------------------------------------------------------------------
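A hedged sketch of where each DisplayRotationHelper call belongs in an ARCore render loop (RotationAwareRenderer and updateSession are illustrative names, assuming the caller owns the ARCore Session):

import com.google.ar.core.Frame
import com.google.ar.core.Session
import io.intelligible.arcoremlkit.common.helpers.DisplayRotationHelper

class RotationAwareRenderer(
    private val displayRotationHelper: DisplayRotationHelper,
    private val session: Session
) {
    fun onResume() = displayRotationHelper.onResume()   // register the display listener
    fun onPause() = displayRotationHelper.onPause()     // unregister it

    // Forwarded from GLSurfaceView.Renderer.onSurfaceChanged.
    fun onSurfaceChanged(width: Int, height: Int) =
        displayRotationHelper.onSurfaceChanged(width, height)

    // Call once per frame, before Session.update(), so ARCore picks up viewport/rotation changes.
    fun updateSession(): Frame {
        displayRotationHelper.updateSessionIfNeeded(session)
        return session.update()
    }
}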
/app/src/main/java/io/intelligible/arcoremlkit/common/helpers/FullScreenHelper.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.helpers
3 |
4 | import android.app.Activity
5 | import android.view.View
6 |
7 | /** Helper to set up the Android full screen mode. */
8 | object FullScreenHelper {
9 | /**
10 | * Sets the Android fullscreen flags. Expected to be called from [Activity.onWindowFocusChanged].
11 | *
12 | * @param activity the Activity on which the full screen mode will be set.
13 | * @param hasFocus the hasFocus flag passed from the [Activity.onWindowFocusChanged] callback.
14 | */
15 | fun setFullScreenOnWindowFocusChanged(activity: Activity, hasFocus: Boolean) {
16 | if (hasFocus) {
17 | // https://developer.android.com/training/system-ui/immersive.html#sticky
18 | activity
19 | .window
20 | .decorView.systemUiVisibility = (View.SYSTEM_UI_FLAG_LAYOUT_STABLE
21 | or View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
22 | or View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
23 | or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
24 | or View.SYSTEM_UI_FLAG_FULLSCREEN
25 | or View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY)
26 | }
27 | }
28 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/common/helpers/SnackbarHelper.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.helpers
3 |
4 | import android.R
5 | import android.app.Activity
6 | import android.view.View
7 | import android.widget.TextView
8 | import com.google.android.material.snackbar.BaseTransientBottomBar.BaseCallback
9 | import com.google.android.material.snackbar.Snackbar
10 |
11 | /**
12 | * Helper to manage the sample snackbar. Hides the Android boilerplate code, and exposes simpler
13 | * methods.
14 | */
15 | class SnackbarHelper {
16 | private var messageSnackbar: Snackbar? = null
17 |
18 | private enum class DismissBehavior {
19 | HIDE, SHOW, FINISH
20 | }
21 |
22 | private var maxLines = 2
23 | private var lastMessage = ""
24 | private var snackbarView: View? = null
25 | val isShowing: Boolean
26 | get() = messageSnackbar != null
27 |
28 | /** Shows a snackbar with a given message. */
29 | fun showMessage(activity: Activity, message: String) {
30 | if (!message.isEmpty() && (!isShowing || lastMessage != message)) {
31 | lastMessage = message
32 | show(activity, message, DismissBehavior.HIDE)
33 | }
34 | }
35 |
36 | /** Shows a snackbar with a given message, and a dismiss button. */
37 | fun showMessageWithDismiss(activity: Activity, message: String) {
38 | show(activity, message, DismissBehavior.SHOW)
39 | }
40 |
41 | /**
42 | * Shows a snackbar with a given error message. When dismissed, will finish the activity. Useful
43 | * for notifying errors, where no further interaction with the activity is possible.
44 | */
45 | fun showError(activity: Activity, errorMessage: String) {
46 | show(activity, errorMessage, DismissBehavior.FINISH)
47 | }
48 |
49 | /**
50 | * Hides the currently showing snackbar, if there is one. Safe to call from any thread. Safe to
51 | * call even if snackbar is not shown.
52 | */
53 | fun hide(activity: Activity) {
54 | if (!isShowing) {
55 | return
56 | }
57 | lastMessage = ""
58 | val messageSnackbarToHide = messageSnackbar
59 | messageSnackbar = null
60 | activity.runOnUiThread { messageSnackbarToHide!!.dismiss() }
61 | }
62 |
63 | fun setMaxLines(lines: Int) {
64 | maxLines = lines
65 | }
66 |
67 | fun setParentView(snackbarView: View?) {
68 | this.snackbarView = snackbarView
69 | }
70 |
71 | private fun show(
72 | activity: Activity, message: String, dismissBehavior: DismissBehavior
73 | ) {
74 | activity.runOnUiThread {
75 | messageSnackbar = Snackbar.make(
76 | activity.findViewById(R.id.content) ,
77 | message,
78 | Snackbar.LENGTH_INDEFINITE
79 | )
80 | messageSnackbar!!.view.setBackgroundColor(BACKGROUND_COLOR)
81 | if (dismissBehavior != DismissBehavior.HIDE) {
82 | messageSnackbar!!.setAction(
83 | "Dismiss"
84 | ) { v: View? -> messageSnackbar!!.dismiss() }
85 | if (dismissBehavior == DismissBehavior.FINISH) {
86 | messageSnackbar!!.addCallback(
87 | object : BaseCallback() {
88 | override fun onDismissed(transientBottomBar: Snackbar?, event: Int) {
89 | super.onDismissed(transientBottomBar, event)
90 | // FINISH behavior: close the activity once the error snackbar is dismissed.
91 | activity.finish()
92 | }
93 | })
94 | }
95 | }
96 | (messageSnackbar!!
97 | .view
98 | .findViewById(com.google.android.material.R.id.snackbar_text) as TextView).maxLines =
99 | maxLines
100 | messageSnackbar!!.show()
101 | }
102 | }
103 |
104 | companion object {
105 | private const val BACKGROUND_COLOR = -0x40cdcdce
106 | }
107 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/common/helpers/TrackingStateHelper.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.helpers
3 |
4 | import android.app.Activity
5 | import android.view.WindowManager
6 | import com.google.ar.core.Camera
7 | import com.google.ar.core.TrackingFailureReason
8 | import com.google.ar.core.TrackingState
9 |
10 | /** Gets human readable tracking failure reasons and suggested actions. */
11 | class TrackingStateHelper(private val activity: Activity) {
12 | private var previousTrackingState: TrackingState? = null
13 |
14 | /** Keep the screen unlocked while tracking, but allow it to lock when tracking stops. */
15 | fun updateKeepScreenOnFlag(trackingState: TrackingState) {
16 | if (trackingState == previousTrackingState) {
17 | return
18 | }
19 | previousTrackingState = trackingState
20 | when (trackingState) {
21 | TrackingState.PAUSED, TrackingState.STOPPED -> activity.runOnUiThread {
22 | activity.window.clearFlags(
23 | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
24 | )
25 | }
26 | TrackingState.TRACKING -> activity.runOnUiThread {
27 | activity.window.addFlags(
28 | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
29 | )
30 | }
31 | }
32 | }
33 |
34 | companion object {
35 | private const val INSUFFICIENT_FEATURES_MESSAGE =
36 | "Can't find anything. Aim device at a surface with more texture or color."
37 | private const val EXCESSIVE_MOTION_MESSAGE = "Moving too fast. Slow down."
38 | private const val INSUFFICIENT_LIGHT_MESSAGE = "Too dark. Try moving to a well-lit area."
39 | private const val BAD_STATE_MESSAGE =
40 | "Tracking lost due to bad internal state. Please try restarting the AR experience."
41 | private const val CAMERA_UNAVAILABLE_MESSAGE =
42 | "Another app is using the camera. Tap on this app or try closing the other one."
43 |
44 | fun getTrackingFailureReasonString(camera: Camera): String {
45 | val reason = camera.trackingFailureReason
46 | return when (reason) {
47 | TrackingFailureReason.NONE -> ""
48 | TrackingFailureReason.BAD_STATE -> BAD_STATE_MESSAGE
49 | TrackingFailureReason.INSUFFICIENT_LIGHT -> INSUFFICIENT_LIGHT_MESSAGE
50 | TrackingFailureReason.EXCESSIVE_MOTION -> EXCESSIVE_MOTION_MESSAGE
51 | TrackingFailureReason.INSUFFICIENT_FEATURES -> INSUFFICIENT_FEATURES_MESSAGE
52 | TrackingFailureReason.CAMERA_UNAVAILABLE -> CAMERA_UNAVAILABLE_MESSAGE
53 | else -> "Unknown tracking failure reason: $reason"
54 | }
55 | }
56 | }
57 | }
--------------------------------------------------------------------------------
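A small, hypothetical per-frame check showing how the helper above is meant to be used (describeTracking is an illustrative name; the returned string could be fed to SnackbarHelper.showMessage):

import com.google.ar.core.Camera
import com.google.ar.core.TrackingState
import io.intelligible.arcoremlkit.common.helpers.TrackingStateHelper

// Keeps the screen awake while tracking and returns a readable message when tracking is lost.
fun describeTracking(camera: Camera, helper: TrackingStateHelper): String? {
    helper.updateKeepScreenOnFlag(camera.trackingState)
    if (camera.trackingState != TrackingState.TRACKING) {
        return TrackingStateHelper.getTrackingFailureReasonString(camera).ifEmpty { null }
    }
    return null
}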
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/Framebuffer.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import android.util.Log
6 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeLogGLError
7 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeThrowGLException
8 | import java.io.Closeable
9 |
10 | /** A framebuffer associated with a texture. */
11 | class Framebuffer(render: SampleRender?, width: Int, height: Int) : Closeable {
12 | private val framebufferId = intArrayOf(0)
13 | /** Returns the color texture associated with this framebuffer. */
14 | var colorTexture: Texture? = null
15 | /** Returns the depth texture associated with this framebuffer. */
16 | var depthTexture: Texture? = null
17 | /** Returns the width of the framebuffer. */
18 | var width = -1
19 | private set
20 | /** Returns the height of the framebuffer. */
21 | var height = -1
22 | private set
23 |
24 | override fun close() {
25 | if (framebufferId[0] != 0) {
26 | GLES30.glDeleteFramebuffers(1, framebufferId, 0)
27 | maybeLogGLError(Log.WARN, TAG, "Failed to free framebuffer", "glDeleteFramebuffers")
28 | framebufferId[0] = 0
29 | }
30 | colorTexture!!.close()
31 | depthTexture!!.close()
32 | }
33 |
34 | /** Resizes the framebuffer to the given dimensions. */
35 | fun resize(width: Int, height: Int) {
36 | if (this.width == width && this.height == height) {
37 | return
38 | }
39 | this.width = width
40 | this.height = height
41 |
42 | // Color texture
43 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, colorTexture!!.textureId[0])
44 | maybeThrowGLException("Failed to bind color texture", "glBindTexture")
45 | GLES30.glTexImage2D(
46 | GLES30.GL_TEXTURE_2D, /*level=*/
47 | 0,
48 | GLES30.GL_RGBA,
49 | width,
50 | height, /*border=*/
51 | 0,
52 | GLES30.GL_RGBA,
53 | GLES30.GL_UNSIGNED_BYTE, /*pixels=*/
54 | null
55 | )
56 | maybeThrowGLException("Failed to specify color texture format", "glTexImage2D")
57 |
58 | // Depth texture
59 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, depthTexture!!.textureId[0])
60 | maybeThrowGLException("Failed to bind depth texture", "glBindTexture")
61 | GLES30.glTexImage2D(
62 | GLES30.GL_TEXTURE_2D, /*level=*/
63 | 0,
64 | GLES30.GL_DEPTH_COMPONENT32F,
65 | width,
66 | height, /*border=*/
67 | 0,
68 | GLES30.GL_DEPTH_COMPONENT,
69 | GLES30.GL_FLOAT, /*pixels=*/
70 | null
71 | )
72 | maybeThrowGLException("Failed to specify depth texture format", "glTexImage2D")
73 | }
74 |
75 | /* package-private */
76 | fun getFramebufferId(): Int {
77 | return framebufferId[0]
78 | }
79 |
80 | companion object {
81 | private val TAG = Framebuffer::class.java.simpleName
82 | }
83 |
84 | /**
85 | * Constructs a [Framebuffer] which renders internally to a texture.
86 | *
87 | *
88 | * In order to render to the [Framebuffer], use [SampleRender.draw].
89 | */
90 | init {
91 | try {
92 | colorTexture = Texture(
93 | render,
94 | Texture.Target.TEXTURE_2D,
95 | Texture.WrapMode.CLAMP_TO_EDGE, /*useMipmaps=*/
96 | false
97 | )
98 | depthTexture = Texture(
99 | render,
100 | Texture.Target.TEXTURE_2D,
101 | Texture.WrapMode.CLAMP_TO_EDGE, /*useMipmaps=*/
102 | false
103 | )
104 |
105 | // Set parameters of the depth texture so that it's readable by shaders.
106 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, depthTexture!!.getTextureId())
107 | maybeThrowGLException("Failed to bind depth texture", "glBindTexture")
108 | GLES30.glTexParameteri(
109 | GLES30.GL_TEXTURE_2D,
110 | GLES30.GL_TEXTURE_COMPARE_MODE,
111 | GLES30.GL_NONE
112 | )
113 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
114 | GLES30.glTexParameteri(
115 | GLES30.GL_TEXTURE_2D,
116 | GLES30.GL_TEXTURE_MIN_FILTER,
117 | GLES30.GL_NEAREST
118 | )
119 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
120 | GLES30.glTexParameteri(
121 | GLES30.GL_TEXTURE_2D,
122 | GLES30.GL_TEXTURE_MAG_FILTER,
123 | GLES30.GL_NEAREST
124 | )
125 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
126 |
127 | // Set initial dimensions.
128 | resize(width, height)
129 |
130 | // Create framebuffer object and bind to the color and depth textures.
131 | GLES30.glGenFramebuffers(1, framebufferId, 0)
132 | maybeThrowGLException("Framebuffer creation failed", "glGenFramebuffers")
133 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, framebufferId[0])
134 | maybeThrowGLException("Failed to bind framebuffer", "glBindFramebuffer")
135 | GLES30.glFramebufferTexture2D(
136 | GLES30.GL_FRAMEBUFFER,
137 | GLES30.GL_COLOR_ATTACHMENT0,
138 | GLES30.GL_TEXTURE_2D,
139 | colorTexture!!.getTextureId(), /*level=*/
140 | 0
141 | )
142 | maybeThrowGLException(
143 | "Failed to bind color texture to framebuffer", "glFramebufferTexture2D"
144 | )
145 | GLES30.glFramebufferTexture2D(
146 | GLES30.GL_FRAMEBUFFER,
147 | GLES30.GL_DEPTH_ATTACHMENT,
148 | GLES30.GL_TEXTURE_2D,
149 | depthTexture!!.getTextureId(), /*level=*/
150 | 0
151 | )
152 | maybeThrowGLException(
153 | "Failed to bind depth texture to framebuffer", "glFramebufferTexture2D"
154 | )
155 | val status = GLES30.glCheckFramebufferStatus(GLES30.GL_FRAMEBUFFER)
156 | check(status == GLES30.GL_FRAMEBUFFER_COMPLETE) { "Framebuffer construction not complete: code $status" }
157 | } catch (t: Throwable) {
158 | close()
159 | throw t
160 | }
161 | }
162 | }
--------------------------------------------------------------------------------
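A brief sketch of the intended lifecycle, assuming a renderer that draws the virtual scene offscreen (the field and function names are illustrative; the real wiring lives in the repo's renderer classes):

import io.intelligible.arcoremlkit.common.samplerender.Framebuffer
import io.intelligible.arcoremlkit.common.samplerender.SampleRender

// Created on the GL thread once the render context exists, then resized with the surface.
private var virtualSceneFramebuffer: Framebuffer? = null

fun onSurfaceCreated(render: SampleRender) {
    virtualSceneFramebuffer = Framebuffer(render, /*width=*/ 1, /*height=*/ 1)
}

fun onSurfaceChanged(width: Int, height: Int) {
    virtualSceneFramebuffer?.resize(width, height)
}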
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/GLError.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import android.opengl.GLException
6 | import android.opengl.GLU
7 | import android.util.Log
8 | import java.util.*
9 |
10 | /** Module for handling OpenGL errors. */
11 | object GLError {
12 | /** Throws a [GLException] if a GL error occurred. */
13 | fun maybeThrowGLException(reason: String, api: String) {
14 | val errorCodes = glErrors
15 | if (errorCodes != null) {
16 | throw GLException(errorCodes[0], formatErrorMessage(reason, api, errorCodes))
17 | }
18 | }
19 |
20 | /** Logs a message with the given logcat priority if a GL error occurred. */
21 | fun maybeLogGLError(priority: Int, tag: String?, reason: String, api: String) {
22 | val errorCodes = glErrors
23 | if (errorCodes != null) {
24 | Log.println(priority, tag, formatErrorMessage(reason, api, errorCodes))
25 | }
26 | }
27 |
28 | private fun formatErrorMessage(reason: String, api: String, errorCodes: List<Int>): String {
29 | val builder = StringBuilder(String.format("%s: %s: ", reason, api))
30 | val iterator = errorCodes.iterator()
31 | while (iterator.hasNext()) {
32 | val errorCode = iterator.next()
33 | builder.append(String.format("%s (%d)", GLU.gluErrorString(errorCode), errorCode))
34 | if (iterator.hasNext()) {
35 | builder.append(", ")
36 | }
37 | }
38 | return builder.toString()
39 | }
40 |
41 | // Shortcut for no errors
42 | private val glErrors: List<Int>?
43 | private get() {
44 | var errorCode = GLES30.glGetError()
45 | // Shortcut for no errors
46 | if (errorCode == GLES30.GL_NO_ERROR) {
47 | return null
48 | }
49 | val errorCodes: MutableList<Int> = ArrayList()
50 | errorCodes.add(errorCode)
51 | while (true) {
52 | errorCode = GLES30.glGetError()
53 | if (errorCode == GLES30.GL_NO_ERROR) {
54 | break
55 | }
56 | errorCodes.add(errorCode)
57 | }
58 | return errorCodes
59 | }
60 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/GpuBuffer.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import android.util.Log
6 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeLogGLError
7 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeThrowGLException
8 | import java.nio.Buffer
9 |
10 | /* package-private */
11 | internal class GpuBuffer(target: Int, numberOfBytesPerEntry: Int, entries: Buffer?) {
12 | private val target: Int
13 | private val numberOfBytesPerEntry: Int
14 | private val bufferId = intArrayOf(0)
15 | var size = 0
16 | private set
17 | private var capacity = 0
18 | fun set(entries: Buffer?) {
19 | // Some GPU drivers will fail with out of memory errors if glBufferData or glBufferSubData is
20 | // called with a size of 0, so avoid this case.
21 | if (entries == null || entries.limit() == 0) {
22 | size = 0
23 | return
24 | }
25 | require(entries.isDirect) { "If non-null, entries buffer must be a direct buffer" }
26 | GLES30.glBindBuffer(target, bufferId[0])
27 | maybeThrowGLException("Failed to bind vertex buffer object", "glBindBuffer")
28 | entries.rewind()
29 | if (entries.limit() <= capacity) {
30 | GLES30.glBufferSubData(target, 0, entries.limit() * numberOfBytesPerEntry, entries)
31 | maybeThrowGLException("Failed to populate vertex buffer object", "glBufferSubData")
32 | size = entries.limit()
33 | } else {
34 | GLES30.glBufferData(
35 | target, entries.limit() * numberOfBytesPerEntry, entries, GLES30.GL_DYNAMIC_DRAW
36 | )
37 | maybeThrowGLException("Failed to populate vertex buffer object", "glBufferData")
38 | size = entries.limit()
39 | capacity = entries.limit()
40 | }
41 | }
42 |
43 | fun free() {
44 | if (bufferId[0] != 0) {
45 | GLES30.glDeleteBuffers(1, bufferId, 0)
46 | maybeLogGLError(Log.WARN, TAG, "Failed to free buffer object", "glDeleteBuffers")
47 | bufferId[0] = 0
48 | }
49 | }
50 |
51 | fun getBufferId(): Int {
52 | return bufferId[0]
53 | }
54 |
55 | companion object {
56 | private val TAG = GpuBuffer::class.java.simpleName
57 |
58 | // These values refer to the byte count of the corresponding Java datatypes.
59 | const val INT_SIZE = 4
60 | const val FLOAT_SIZE = 4
61 | }
62 |
63 | init {
64 | var entries = entries
65 | if (entries != null) {
66 | require(entries.isDirect) { "If non-null, entries buffer must be a direct buffer" }
67 | // Some GPU drivers will fail with out of memory errors if glBufferData or glBufferSubData is
68 | // called with a size of 0, so avoid this case.
69 | if (entries.limit() == 0) {
70 | entries = null
71 | }
72 | }
73 | this.target = target
74 | this.numberOfBytesPerEntry = numberOfBytesPerEntry
75 | if (entries == null) {
76 | size = 0
77 | capacity = 0
78 | } else {
79 | size = entries.limit()
80 | capacity = entries.limit()
81 | }
82 | try {
83 | // Clear VAO to prevent unintended state change.
84 | GLES30.glBindVertexArray(0)
85 | maybeThrowGLException("Failed to unbind vertex array", "glBindVertexArray")
86 | GLES30.glGenBuffers(1, bufferId, 0)
87 | maybeThrowGLException("Failed to generate buffers", "glGenBuffers")
88 | GLES30.glBindBuffer(target, bufferId[0])
89 | maybeThrowGLException("Failed to bind buffer object", "glBindBuffer")
90 | if (entries != null) {
91 | entries.rewind()
92 | GLES30.glBufferData(
93 | target, entries.limit() * numberOfBytesPerEntry, entries, GLES30.GL_DYNAMIC_DRAW
94 | )
95 | }
96 | maybeThrowGLException("Failed to populate buffer object", "glBufferData")
97 | } catch (t: Throwable) {
98 | free()
99 | throw t
100 | }
101 | }
102 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/IndexBuffer.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import java.io.Closeable
6 | import java.nio.IntBuffer
7 |
8 | /**
9 | * A list of vertex indices stored GPU-side.
10 | *
11 | *
12 | * When constructing a [Mesh], an [IndexBuffer] may be passed to describe the
13 | * ordering of vertices when drawing each primitive.
14 | *
15 | * @see [glDrawElements](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glDrawElements.xhtml)
16 | */
17 | class IndexBuffer(render: SampleRender?, entries: IntBuffer?) : Closeable {
18 | private val buffer: GpuBuffer
19 |
20 | /**
21 | * Populate with new data.
22 | *
23 | *
24 | * The entire buffer is replaced by the contents of the *direct* buffer `entries`
25 | * starting from the beginning of the buffer, not the current cursor position. The cursor will be
26 | * left in an undefined position after this function returns.
27 | *
28 | *
29 | * The GPU buffer is reallocated automatically if necessary.
30 | *
31 | *
32 | * The `entries` buffer may be null, in which case the buffer will become empty.
33 | */
34 | fun set(entries: IntBuffer?) {
35 | buffer.set(entries)
36 | }
37 |
38 | override fun close() {
39 | buffer.free()
40 | }
41 |
42 | /* package-private */
43 | val bufferId: Int
44 | get() = buffer.getBufferId()
45 |
46 | /* package-private */
47 | val size: Int
48 | get() = buffer.size
49 |
50 | /**
51 | * Construct an [IndexBuffer] populated with initial data.
52 | *
53 | *
54 | * The GPU buffer will be filled with the data in the *direct* buffer `entries`,
55 | * starting from the beginning of the buffer (not the current cursor position). The cursor will be
56 | * left in an undefined position after this function returns.
57 | *
58 | *
59 | * The `entries` buffer may be null, in which case an empty buffer is constructed
60 | * instead.
61 | */
62 | init {
63 | buffer = GpuBuffer(GLES30.GL_ELEMENT_ARRAY_BUFFER, GpuBuffer.INT_SIZE, entries)
64 | }
65 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/Mesh.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import android.util.Log
6 | import de.javagl.obj.ObjData
7 | import de.javagl.obj.ObjReader
8 | import de.javagl.obj.ObjUtils
9 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeLogGLError
10 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeThrowGLException
11 | import java.io.Closeable
12 | import java.io.IOException
13 |
14 | /**
15 | * A collection of vertices, faces, and other attributes that define how to render a 3D object.
16 | *
17 | *
18 | * To render the mesh, use [SampleRender.draw].
19 | */
20 | class Mesh(
21 | render: SampleRender?,
22 | primitiveMode: PrimitiveMode,
23 | indexBuffer: IndexBuffer?,
24 | vertexBuffers: Array<VertexBuffer>?
25 | ) : Closeable {
26 | /**
27 | * The kind of primitive to render.
28 | *
29 | *
30 | * This determines how the data in [VertexBuffer]s are interpreted. See [here](https://www.khronos.org/opengl/wiki/Primitive) for more on how primitives
31 | * behave.
32 | */
33 | enum class PrimitiveMode( /* package-private */
34 | val glesEnum: Int
35 | ) {
36 | POINTS(GLES30.GL_POINTS), LINE_STRIP(GLES30.GL_LINE_STRIP), LINE_LOOP(GLES30.GL_LINE_LOOP), LINES(
37 | GLES30.GL_LINES
38 | ),
39 | TRIANGLE_STRIP(GLES30.GL_TRIANGLE_STRIP), TRIANGLE_FAN(GLES30.GL_TRIANGLE_FAN), TRIANGLES(
40 | GLES30.GL_TRIANGLES
41 | );
42 | }
43 |
44 | private val vertexArrayId = intArrayOf(0)
45 | private val primitiveMode: PrimitiveMode
46 | private val indexBuffer: IndexBuffer?
47 | private val vertexBuffers: Array<VertexBuffer>
48 | override fun close() {
49 | if (vertexArrayId[0] != 0) {
50 | GLES30.glDeleteVertexArrays(1, vertexArrayId, 0)
51 | maybeLogGLError(
52 | Log.WARN, TAG, "Failed to free vertex array object", "glDeleteVertexArrays"
53 | )
54 | }
55 | }
56 |
57 | /**
58 | * Draws the mesh. Don't call this directly unless you are doing low level OpenGL code; instead,
59 | * prefer [SampleRender.draw].
60 | */
61 | fun lowLevelDraw() {
62 | check(vertexArrayId[0] != 0) { "Tried to draw a freed Mesh" }
63 | GLES30.glBindVertexArray(vertexArrayId[0])
64 | maybeThrowGLException("Failed to bind vertex array object", "glBindVertexArray")
65 | if (indexBuffer == null) {
66 | // Sanity check for debugging
67 | val numberOfVertices = vertexBuffers[0].numberOfVertices
68 | for (i in 1 until vertexBuffers.size) {
69 | check(vertexBuffers[i].numberOfVertices == numberOfVertices) { "Vertex buffers have mismatching numbers of vertices" }
70 | }
71 | GLES30.glDrawArrays(primitiveMode.glesEnum, 0, numberOfVertices)
72 | maybeThrowGLException("Failed to draw vertex array object", "glDrawArrays")
73 | } else {
74 | GLES30.glDrawElements(
75 | primitiveMode.glesEnum, indexBuffer.size, GLES30.GL_UNSIGNED_INT, 0
76 | )
77 | maybeThrowGLException(
78 | "Failed to draw vertex array object with indices", "glDrawElements"
79 | )
80 | }
81 | }
82 |
83 | companion object {
84 | private val TAG = Mesh::class.java.simpleName
85 |
86 | /**
87 | * Constructs a [Mesh] from the given Wavefront OBJ file.
88 | *
89 | *
90 | * The [Mesh] will be constructed with three attributes, indexed in the order of local
91 | * coordinates (location 0, vec3), texture coordinates (location 1, vec2), and vertex normals
92 | * (location 2, vec3).
93 | */
94 | @Throws(IOException::class)
95 | fun createFromAsset(render: SampleRender, assetFileName: String?): Mesh {
96 | render.assets.open(assetFileName!!).use { inputStream ->
97 | val obj = ObjUtils.convertToRenderable(ObjReader.read(inputStream))
98 |
99 | // Obtain the data from the OBJ, as direct buffers:
100 | val vertexIndices = ObjData.getFaceVertexIndices(obj, /*numVerticesPerFace=*/3)
101 | val localCoordinates = ObjData.getVertices(obj)
102 | val textureCoordinates = ObjData.getTexCoords(obj, /*dimensions=*/2)
103 | val normals = ObjData.getNormals(obj)
104 | val vertexBuffers = arrayOf(
105 | VertexBuffer(render, 3, localCoordinates),
106 | VertexBuffer(render, 2, textureCoordinates),
107 | VertexBuffer(render, 3, normals)
108 | )
109 | val indexBuffer = IndexBuffer(render, vertexIndices)
110 | return Mesh(render, PrimitiveMode.TRIANGLES, indexBuffer, vertexBuffers)
111 | }
112 | }
113 | }
114 |
115 | /**
116 | * Construct a [Mesh].
117 | *
118 | *
119 | * The data in the given [IndexBuffer] and [VertexBuffer]s does not need to be
120 | * finalized; they may be freely changed throughout the lifetime of a [Mesh] using their
121 | * respective `set()` methods.
122 | *
123 | *
124 | * The ordering of the `vertexBuffers` is significant. Their array indices will
125 | * correspond to their attribute locations, which must be taken into account in shader code. The
126 | * [layout qualifier](https://www.khronos.org/opengl/wiki/Layout_Qualifier_(GLSL)) must
127 | * be used in the vertex shader code to explicitly associate attributes with these indices.
128 | */
129 | init {
130 | require(!(vertexBuffers == null || vertexBuffers.size == 0)) { "Must pass at least one vertex buffer" }
131 | this.primitiveMode = primitiveMode
132 | this.indexBuffer = indexBuffer
133 | this.vertexBuffers = vertexBuffers
134 | try {
135 | // Create vertex array
136 | GLES30.glGenVertexArrays(1, vertexArrayId, 0)
137 | maybeThrowGLException("Failed to generate a vertex array", "glGenVertexArrays")
138 |
139 | // Bind vertex array
140 | GLES30.glBindVertexArray(vertexArrayId[0])
141 | maybeThrowGLException("Failed to bind vertex array object", "glBindVertexArray")
142 | if (indexBuffer != null) {
143 | GLES30.glBindBuffer(GLES30.GL_ELEMENT_ARRAY_BUFFER, indexBuffer.bufferId)
144 | }
145 | for (i in vertexBuffers.indices) {
146 | // Bind each vertex buffer to vertex array
147 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, vertexBuffers[i].bufferId)
148 | maybeThrowGLException("Failed to bind vertex buffer", "glBindBuffer")
149 | GLES30.glVertexAttribPointer(
150 | i, vertexBuffers[i].numberOfEntriesPerVertex, GLES30.GL_FLOAT, false, 0, 0
151 | )
152 | maybeThrowGLException(
153 | "Failed to associate vertex buffer with vertex array", "glVertexAttribPointer"
154 | )
155 | GLES30.glEnableVertexAttribArray(i)
156 | maybeThrowGLException(
157 | "Failed to enable vertex buffer", "glEnableVertexAttribArray"
158 | )
159 | }
160 | } catch (t: Throwable) {
161 | close()
162 | throw t
163 | }
164 | }
165 | }
--------------------------------------------------------------------------------
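Editor's note: a minimal sketch of how the vertex-buffer ordering described in Mesh's constructor doc maps onto shader attribute locations. It assumes `render` is a live SampleRender on the GL thread; the quad data and function name are illustrative, not part of the sample.

import java.nio.ByteBuffer
import java.nio.ByteOrder

fun makeUnitQuadMesh(render: SampleRender): Mesh {
    // Four vertices, two floats (x, y) each, in a *direct* buffer as VertexBuffer requires.
    val positions = ByteBuffer.allocateDirect(/*4 vertices * 2 floats * 4 bytes=*/32)
        .order(ByteOrder.nativeOrder())
        .asFloatBuffer()
        .put(floatArrayOf(-1f, -1f, +1f, -1f, -1f, +1f, +1f, +1f))
    positions.rewind()

    // Array index 0 becomes attribute location 0, so the vertex shader must declare:
    //   layout(location = 0) in vec2 a_Position;
    val vertexBuffers = arrayOf(VertexBuffer(render, /*numberOfEntriesPerVertex=*/2, positions))
    return Mesh(render, Mesh.PrimitiveMode.TRIANGLE_STRIP, /*indexBuffer=*/null, vertexBuffers)
}
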
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/SampleRender.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.content.res.AssetManager
5 | import android.opengl.GLES30
6 | import android.opengl.GLSurfaceView
7 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeThrowGLException
8 | import io.intelligible.arcoremlkit.common.samplerender.SampleRender
9 | import javax.microedition.khronos.egl.EGLConfig
10 | import javax.microedition.khronos.opengles.GL10
11 |
12 | /** A SampleRender context. */
13 | class SampleRender(
14 | glSurfaceView: GLSurfaceView, renderer: Renderer, /* package-private */
15 | val assets: AssetManager
16 | ) {
17 | private var viewportWidth = 1
18 | private var viewportHeight = 1
19 | /**
20 | * Draw a [Mesh] with the specified [Shader] to the given [Framebuffer].
21 | *
22 | *
23 | * The `framebuffer` argument may be null, in which case the default framebuffer is used.
24 | */
25 | /** Draw a [Mesh] with the specified [Shader]. */
26 | @JvmOverloads
27 | fun draw(mesh: Mesh, shader: Shader, framebuffer: Framebuffer? = /*framebuffer=*/null) {
28 | useFramebuffer(framebuffer)
29 | shader.lowLevelUse()
30 | mesh.lowLevelDraw()
31 | }
32 |
33 | /**
34 | * Clear the given framebuffer.
35 | *
36 | *
37 | * The `framebuffer` argument may be null, in which case the default framebuffer is
38 | * cleared.
39 | */
40 | fun clear(framebuffer: Framebuffer?, r: Float, g: Float, b: Float, a: Float) {
41 | useFramebuffer(framebuffer)
42 | GLES30.glClearColor(r, g, b, a)
43 | maybeThrowGLException("Failed to set clear color", "glClearColor")
44 | GLES30.glDepthMask(true)
45 | maybeThrowGLException("Failed to set depth write mask", "glDepthMask")
46 | GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT or GLES30.GL_DEPTH_BUFFER_BIT)
47 | maybeThrowGLException("Failed to clear framebuffer", "glClear")
48 | }
49 |
50 | /** Interface to be implemented for rendering callbacks. */
51 | interface Renderer {
52 | /**
53 | * Called by [SampleRender] when the GL render surface is created.
54 | *
55 | *
56 | * See [GLSurfaceView.Renderer.onSurfaceCreated].
57 | */
58 | fun onSurfaceCreated(render: SampleRender?)
59 |
60 | /**
61 | * Called by [SampleRender] when the GL render surface dimensions are changed.
62 | *
63 | *
64 | * See [GLSurfaceView.Renderer.onSurfaceChanged].
65 | */
66 | fun onSurfaceChanged(render: SampleRender?, width: Int, height: Int)
67 |
68 | /**
69 | * Called by [SampleRender] when a GL frame is to be rendered.
70 | *
71 | *
72 | * See [GLSurfaceView.Renderer.onDrawFrame].
73 | */
74 | fun onDrawFrame(render: SampleRender?)
75 | }
76 |
77 | private fun useFramebuffer(framebuffer: Framebuffer?) {
78 | val framebufferId: Int
79 | val viewportWidth: Int
80 | val viewportHeight: Int
81 | if (framebuffer == null) {
82 | framebufferId = 0
83 | viewportWidth = this.viewportWidth
84 | viewportHeight = this.viewportHeight
85 | } else {
86 | framebufferId = framebuffer.getFramebufferId()
87 | viewportWidth = framebuffer.width
88 | viewportHeight = framebuffer.height
89 | }
90 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, framebufferId)
91 | maybeThrowGLException("Failed to bind framebuffer", "glBindFramebuffer")
92 | GLES30.glViewport(0, 0, viewportWidth, viewportHeight)
93 | maybeThrowGLException("Failed to set viewport dimensions", "glViewport")
94 | }
95 |
96 | companion object {
97 | private val TAG = SampleRender::class.java.simpleName
98 | }
99 |
100 | /**
101 | * Constructs a SampleRender object and instantiates GLSurfaceView parameters.
102 | *
103 | * @param glSurfaceView Android GLSurfaceView
104 | * @param renderer Renderer implementation to receive callbacks
105 |      * @param assets AssetManager for loading Android resources
106 | */
107 | init {
108 | glSurfaceView.preserveEGLContextOnPause = true
109 | glSurfaceView.setEGLContextClientVersion(3)
110 | glSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0)
111 | glSurfaceView.setRenderer(
112 | object : GLSurfaceView.Renderer {
113 | override fun onSurfaceCreated(gl: GL10, config: EGLConfig) {
114 | GLES30.glEnable(GLES30.GL_BLEND)
115 | maybeThrowGLException("Failed to enable blending", "glEnable")
116 | renderer.onSurfaceCreated(this@SampleRender)
117 | }
118 |
119 | override fun onSurfaceChanged(gl: GL10, w: Int, h: Int) {
120 | viewportWidth = w
121 | viewportHeight = h
122 | renderer.onSurfaceChanged(this@SampleRender, w, h)
123 | }
124 |
125 | override fun onDrawFrame(gl: GL10) {
126 | clear( /*framebuffer=*/null, 0f, 0f, 0f, 1f)
127 | renderer.onDrawFrame(this@SampleRender)
128 | }
129 | })
130 | glSurfaceView.renderMode = GLSurfaceView.RENDERMODE_CONTINUOUSLY
131 | glSurfaceView.setWillNotDraw(false)
132 | }
133 | }
--------------------------------------------------------------------------------
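Editor's note: a hedged sketch of how the Renderer interface above is typically implemented and wired up. `glSurfaceView` and `assets` are assumed to come from the hosting Activity and are not defined here.

class MinimalRenderer : SampleRender.Renderer {
    override fun onSurfaceCreated(render: SampleRender?) {
        // The GL context is ready; create meshes, shaders, and textures here.
    }

    override fun onSurfaceChanged(render: SampleRender?, width: Int, height: Int) {
        // React to viewport size changes if needed; SampleRender tracks the viewport itself.
    }

    override fun onDrawFrame(render: SampleRender?) {
        // SampleRender has already cleared the default framebuffer at this point;
        // issue draw calls such as render?.draw(mesh, shader) here.
    }
}

// Wiring, e.g. in Activity.onCreate (glSurfaceView and assets are assumed to exist):
// val sampleRender = SampleRender(glSurfaceView, MinimalRenderer(), assets)
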
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/Shader.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import android.opengl.GLException
6 | import android.util.Log
7 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeLogGLError
8 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeThrowGLException
9 | import java.io.Closeable
10 | import java.io.IOException
11 | import java.io.InputStream
12 | import java.io.InputStreamReader
13 | import java.nio.charset.StandardCharsets
14 | import java.util.*
15 | import java.util.regex.Matcher
16 |
17 | /**
18 | * Represents a GPU shader, the state of its associated uniforms, and some additional draw state.
19 | */
20 | class Shader(
21 | render: SampleRender?,
22 | vertexShaderCode: String,
23 | fragmentShaderCode: String,
24 |     defines: Map<String, String>?
25 | ) : Closeable {
26 | /**
27 | * A factor to be used in a blend function.
28 | *
29 | * @see [glBlendFunc](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glBlendFunc.xhtml)
30 | */
31 | enum class BlendFactor( /* package-private */
32 | val glesEnum: Int
33 | ) {
34 | ZERO(GLES30.GL_ZERO), ONE(GLES30.GL_ONE), SRC_COLOR(GLES30.GL_SRC_COLOR), ONE_MINUS_SRC_COLOR(
35 | GLES30.GL_ONE_MINUS_SRC_COLOR
36 | ),
37 | DST_COLOR(GLES30.GL_DST_COLOR), ONE_MINUS_DST_COLOR(GLES30.GL_ONE_MINUS_DST_COLOR), SRC_ALPHA(
38 | GLES30.GL_SRC_ALPHA
39 | ),
40 | ONE_MINUS_SRC_ALPHA(GLES30.GL_ONE_MINUS_SRC_ALPHA), DST_ALPHA(GLES30.GL_DST_ALPHA), ONE_MINUS_DST_ALPHA(
41 | GLES30.GL_ONE_MINUS_DST_ALPHA
42 | ),
43 | CONSTANT_COLOR(GLES30.GL_CONSTANT_COLOR), ONE_MINUS_CONSTANT_COLOR(GLES30.GL_ONE_MINUS_CONSTANT_COLOR), CONSTANT_ALPHA(
44 | GLES30.GL_CONSTANT_ALPHA
45 | ),
46 | ONE_MINUS_CONSTANT_ALPHA(GLES30.GL_ONE_MINUS_CONSTANT_ALPHA);
47 | }
48 |
49 | private var programId = 0
50 |     private val uniforms: MutableMap<Int, Uniform> = HashMap()
51 |     private var maxTextureUnit = 0
52 |     private val uniformLocations: MutableMap<String, Int> = HashMap()
53 |     private val uniformNames: MutableMap<Int, String> = HashMap()
54 | private var depthTest = true
55 | private var depthWrite = true
56 | private var sourceRgbBlend = BlendFactor.ONE
57 | private var destRgbBlend = BlendFactor.ZERO
58 | private var sourceAlphaBlend = BlendFactor.ONE
59 | private var destAlphaBlend = BlendFactor.ZERO
60 | override fun close() {
61 | if (programId != 0) {
62 | GLES30.glDeleteProgram(programId)
63 | programId = 0
64 | }
65 | }
66 |
67 | /**
68 | * Sets depth test state.
69 | *
70 |      * @see [glEnable](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glEnable.xhtml)
71 |      */
72 | fun setDepthTest(depthTest: Boolean): Shader {
73 | this.depthTest = depthTest
74 | return this
75 | }
76 |
77 | /**
78 | * Sets depth write state.
79 | *
80 | * @see [glDepthMask](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glDepthMask.xhtml).
81 | */
82 | fun setDepthWrite(depthWrite: Boolean): Shader {
83 | this.depthWrite = depthWrite
84 | return this
85 | }
86 |
87 | /**
88 | * Sets blending function.
89 | *
90 | * @see [glBlendFunc](https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glBlendFunc.xhtml)
91 | */
92 | fun setBlend(sourceBlend: BlendFactor, destBlend: BlendFactor): Shader {
93 | sourceRgbBlend = sourceBlend
94 | destRgbBlend = destBlend
95 | sourceAlphaBlend = sourceBlend
96 | destAlphaBlend = destBlend
97 | return this
98 | }
99 |
100 | /**
101 | * Sets blending functions separately for RGB and alpha channels.
102 | *
103 |      * @see [glBlendFuncSeparate](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glBlendFuncSeparate.xhtml)
104 | */
105 | fun setBlend(
106 | sourceRgbBlend: BlendFactor,
107 | destRgbBlend: BlendFactor,
108 | sourceAlphaBlend: BlendFactor,
109 | destAlphaBlend: BlendFactor
110 | ): Shader {
111 | this.sourceRgbBlend = sourceRgbBlend
112 | this.destRgbBlend = destRgbBlend
113 | this.sourceAlphaBlend = sourceAlphaBlend
114 | this.destAlphaBlend = destAlphaBlend
115 | return this
116 | }
117 |
118 | /** Sets a texture uniform. */
119 | fun setTexture(name: String, texture: Texture): Shader {
120 | // Special handling for Textures. If replacing an existing texture uniform, reuse the texture
121 | // unit.
122 | val location = getUniformLocation(name)
123 | val uniform = uniforms[location]
124 | val textureUnit: Int
125 | textureUnit = if (uniform !is UniformTexture) {
126 | maxTextureUnit++
127 | } else {
128 | uniform.textureUnit
129 | }
130 | uniforms[location] = UniformTexture(textureUnit, texture)
131 | return this
132 | }
133 |
134 | /** Sets a `bool` uniform. */
135 | fun setBool(name: String, v0: Boolean): Shader {
136 | val values = intArrayOf(if (v0) 1 else 0)
137 | uniforms[getUniformLocation(name)] = UniformInt(values)
138 | return this
139 | }
140 |
141 | /** Sets an `int` uniform. */
142 | fun setInt(name: String, v0: Int): Shader {
143 | val values = intArrayOf(v0)
144 | uniforms[getUniformLocation(name)] = UniformInt(values)
145 | return this
146 | }
147 |
148 | /** Sets a `float` uniform. */
149 | fun setFloat(name: String, v0: Float): Shader {
150 | val values = floatArrayOf(v0)
151 | uniforms[getUniformLocation(name)] = Uniform1f(values)
152 | return this
153 | }
154 |
155 | /** Sets a `vec2` uniform. */
156 | fun setVec2(name: String, values: FloatArray): Shader {
157 | require(values.size == 2) { "Value array length must be 2" }
158 | uniforms[getUniformLocation(name)] = Uniform2f(values.clone())
159 | return this
160 | }
161 |
162 | /** Sets a `vec3` uniform. */
163 | fun setVec3(name: String, values: FloatArray): Shader {
164 | require(values.size == 3) { "Value array length must be 3" }
165 | uniforms[getUniformLocation(name)] = Uniform3f(values.clone())
166 | return this
167 | }
168 |
169 | /** Sets a `vec4` uniform. */
170 | fun setVec4(name: String, values: FloatArray): Shader {
171 | require(values.size == 4) { "Value array length must be 4" }
172 | uniforms[getUniformLocation(name)] = Uniform4f(values.clone())
173 | return this
174 | }
175 |
176 | /** Sets a `mat2` uniform. */
177 | fun setMat2(name: String, values: FloatArray): Shader {
178 | require(values.size == 4) { "Value array length must be 4 (2x2)" }
179 | uniforms[getUniformLocation(name)] = UniformMatrix2f(values.clone())
180 | return this
181 | }
182 |
183 | /** Sets a `mat3` uniform. */
184 | fun setMat3(name: String, values: FloatArray): Shader {
185 | require(values.size == 9) { "Value array length must be 9 (3x3)" }
186 | uniforms[getUniformLocation(name)] = UniformMatrix3f(values.clone())
187 | return this
188 | }
189 |
190 | /** Sets a `mat4` uniform. */
191 | fun setMat4(name: String, values: FloatArray): Shader {
192 | require(values.size == 16) { "Value array length must be 16 (4x4)" }
193 | uniforms[getUniformLocation(name)] = UniformMatrix4f(values.clone())
194 | return this
195 | }
196 |
197 | /** Sets a `bool` array uniform. */
198 | fun setBoolArray(name: String, values: BooleanArray): Shader {
199 | val intValues = IntArray(values.size)
200 | for (i in values.indices) {
201 | intValues[i] = if (values[i]) 1 else 0
202 | }
203 | uniforms[getUniformLocation(name)] = UniformInt(intValues)
204 | return this
205 | }
206 |
207 | /** Sets an `int` array uniform. */
208 | fun setIntArray(name: String, values: IntArray): Shader {
209 | uniforms[getUniformLocation(name)] = UniformInt(values.clone())
210 | return this
211 | }
212 |
213 | /** Sets a `float` array uniform. */
214 | fun setFloatArray(name: String, values: FloatArray): Shader {
215 | uniforms[getUniformLocation(name)] = Uniform1f(values.clone())
216 | return this
217 | }
218 |
219 | /** Sets a `vec2` array uniform. */
220 | fun setVec2Array(name: String, values: FloatArray): Shader {
221 | require(values.size % 2 == 0) { "Value array length must be divisible by 2" }
222 | uniforms[getUniformLocation(name)] = Uniform2f(values.clone())
223 | return this
224 | }
225 |
226 | /** Sets a `vec3` array uniform. */
227 | fun setVec3Array(name: String, values: FloatArray): Shader {
228 | require(values.size % 3 == 0) { "Value array length must be divisible by 3" }
229 | uniforms[getUniformLocation(name)] = Uniform3f(values.clone())
230 | return this
231 | }
232 |
233 | /** Sets a `vec4` array uniform. */
234 | fun setVec4Array(name: String, values: FloatArray): Shader {
235 | require(values.size % 4 == 0) { "Value array length must be divisible by 4" }
236 | uniforms[getUniformLocation(name)] = Uniform4f(values.clone())
237 | return this
238 | }
239 |
240 | /** Sets a `mat2` array uniform. */
241 | fun setMat2Array(name: String, values: FloatArray): Shader {
242 | require(values.size % 4 == 0) { "Value array length must be divisible by 4 (2x2)" }
243 | uniforms[getUniformLocation(name)] = UniformMatrix2f(values.clone())
244 | return this
245 | }
246 |
247 | /** Sets a `mat3` array uniform. */
248 | fun setMat3Array(name: String, values: FloatArray): Shader {
249 | require(values.size % 9 == 0) { "Values array length must be divisible by 9 (3x3)" }
250 | uniforms[getUniformLocation(name)] = UniformMatrix3f(values.clone())
251 | return this
252 | }
253 |
254 |     /** Sets a `mat4` array uniform. */
255 | fun setMat4Array(name: String, values: FloatArray): Shader {
256 | require(values.size % 16 == 0) { "Value array length must be divisible by 16 (4x4)" }
257 | uniforms[getUniformLocation(name)] = UniformMatrix4f(values.clone())
258 | return this
259 | }
260 |
261 | /**
262 | * Activates the shader. Don't call this directly unless you are doing low level OpenGL code;
263 | * instead, prefer [SampleRender.draw].
264 | */
265 | fun lowLevelUse() {
266 | // Make active shader/set uniforms
267 | check(programId != 0) { "Attempted to use freed shader" }
268 | GLES30.glUseProgram(programId)
269 | maybeThrowGLException("Failed to use shader program", "glUseProgram")
270 | GLES30.glBlendFuncSeparate(
271 | sourceRgbBlend.glesEnum,
272 | destRgbBlend.glesEnum,
273 | sourceAlphaBlend.glesEnum,
274 | destAlphaBlend.glesEnum
275 | )
276 | maybeThrowGLException("Failed to set blend mode", "glBlendFuncSeparate")
277 | GLES30.glDepthMask(depthWrite)
278 | maybeThrowGLException("Failed to set depth write mask", "glDepthMask")
279 | if (depthTest) {
280 | GLES30.glEnable(GLES30.GL_DEPTH_TEST)
281 | maybeThrowGLException("Failed to enable depth test", "glEnable")
282 | } else {
283 | GLES30.glDisable(GLES30.GL_DEPTH_TEST)
284 | maybeThrowGLException("Failed to disable depth test", "glDisable")
285 | }
286 | try {
287 | // Remove all non-texture uniforms from the map after setting them, since they're stored as
288 | // part of the program.
289 |             val obsoleteEntries = ArrayList<Int>(uniforms.size)
290 | for ((key, value) in uniforms) {
291 | try {
292 | value.use(key)
293 | if (value !is UniformTexture) {
294 | obsoleteEntries.add(key)
295 | }
296 | } catch (e: GLException) {
297 | val name = uniformNames[key]
298 | throw IllegalArgumentException("Error setting uniform `$name'", e)
299 | }
300 | }
301 | uniforms.keys.removeAll(obsoleteEntries)
302 | } finally {
303 | GLES30.glActiveTexture(GLES30.GL_TEXTURE0)
304 | maybeLogGLError(Log.WARN, TAG, "Failed to set active texture", "glActiveTexture")
305 | }
306 | }
307 |
308 | private interface Uniform {
309 | fun use(location: Int)
310 | }
311 |
312 | private class UniformTexture(val textureUnit: Int, private val texture: Texture) : Uniform {
313 | override fun use(location: Int) {
314 | check(texture.textureId[0] != 0) { "Tried to draw with freed texture" }
315 | GLES30.glActiveTexture(GLES30.GL_TEXTURE0 + textureUnit)
316 | maybeThrowGLException("Failed to set active texture", "glActiveTexture")
317 | GLES30.glBindTexture(texture.target.glesEnum, texture.textureId[0])
318 | maybeThrowGLException("Failed to bind texture", "glBindTexture")
319 | GLES30.glUniform1i(location, textureUnit)
320 | maybeThrowGLException("Failed to set shader texture uniform", "glUniform1i")
321 | }
322 | }
323 |
324 | private class UniformInt(private val values: IntArray) : Uniform {
325 | override fun use(location: Int) {
326 | GLES30.glUniform1iv(location, values.size, values, 0)
327 | maybeThrowGLException("Failed to set shader uniform 1i", "glUniform1iv")
328 | }
329 | }
330 |
331 | private class Uniform1f(private val values: FloatArray) : Uniform {
332 | override fun use(location: Int) {
333 | GLES30.glUniform1fv(location, values.size, values, 0)
334 | maybeThrowGLException("Failed to set shader uniform 1f", "glUniform1fv")
335 | }
336 | }
337 |
338 | private class Uniform2f(private val values: FloatArray) : Uniform {
339 | override fun use(location: Int) {
340 | GLES30.glUniform2fv(location, values.size / 2, values, 0)
341 | maybeThrowGLException("Failed to set shader uniform 2f", "glUniform2fv")
342 | }
343 | }
344 |
345 | private class Uniform3f(private val values: FloatArray) : Uniform {
346 | override fun use(location: Int) {
347 | GLES30.glUniform3fv(location, values.size / 3, values, 0)
348 | maybeThrowGLException("Failed to set shader uniform 3f", "glUniform3fv")
349 | }
350 | }
351 |
352 | private class Uniform4f(private val values: FloatArray) : Uniform {
353 | override fun use(location: Int) {
354 | GLES30.glUniform4fv(location, values.size / 4, values, 0)
355 | maybeThrowGLException("Failed to set shader uniform 4f", "glUniform4fv")
356 | }
357 | }
358 |
359 | private class UniformMatrix2f(private val values: FloatArray) : Uniform {
360 | override fun use(location: Int) {
361 | GLES30.glUniformMatrix2fv(location, values.size / 4, /*transpose=*/false, values, 0)
362 | maybeThrowGLException("Failed to set shader uniform matrix 2f", "glUniformMatrix2fv")
363 | }
364 | }
365 |
366 | private class UniformMatrix3f(private val values: FloatArray) : Uniform {
367 | override fun use(location: Int) {
368 | GLES30.glUniformMatrix3fv(location, values.size / 9, /*transpose=*/false, values, 0)
369 | maybeThrowGLException("Failed to set shader uniform matrix 3f", "glUniformMatrix3fv")
370 | }
371 | }
372 |
373 | private class UniformMatrix4f(private val values: FloatArray) : Uniform {
374 | override fun use(location: Int) {
375 | GLES30.glUniformMatrix4fv(location, values.size / 16, /*transpose=*/false, values, 0)
376 | maybeThrowGLException("Failed to set shader uniform matrix 4f", "glUniformMatrix4fv")
377 | }
378 | }
379 |
380 | private fun getUniformLocation(name: String): Int {
381 | val locationObject = uniformLocations[name]
382 | if (locationObject != null) {
383 | return locationObject
384 | }
385 | val location = GLES30.glGetUniformLocation(programId, name)
386 | maybeThrowGLException("Failed to find uniform", "glGetUniformLocation")
387 | require(location != -1) { "Shader uniform does not exist: $name" }
388 | uniformLocations[name] = Integer.valueOf(location)
389 | uniformNames[Integer.valueOf(location)] = name
390 | return location
391 | }
392 |
393 | companion object {
394 | private val TAG = Shader::class.java.simpleName
395 |
396 | /**
397 | * Creates a [Shader] from the given asset file names.
398 | *
399 | *
400 | * The file contents are interpreted as UTF-8 text.
401 | *
402 | * @param defines A map of shader precompiler symbols to be defined with the given names and
403 | * values
404 | */
405 | @Throws(IOException::class)
406 | fun createFromAssets(
407 | render: SampleRender,
408 | vertexShaderFileName: String?,
409 | fragmentShaderFileName: String?,
410 |             defines: Map<String, String>?
411 | ): Shader {
412 | val assets = render.assets
413 | return Shader(
414 | render,
415 | inputStreamToString(
416 | assets.open(
417 | vertexShaderFileName!!
418 | )
419 | ),
420 | inputStreamToString(
421 | assets.open(
422 | fragmentShaderFileName!!
423 | )
424 | ),
425 | defines
426 | )
427 | }
428 |
429 | private fun createShader(type: Int, code: String): Int {
430 | val shaderId = GLES30.glCreateShader(type)
431 | maybeThrowGLException("Shader creation failed", "glCreateShader")
432 | GLES30.glShaderSource(shaderId, code)
433 | maybeThrowGLException("Shader source failed", "glShaderSource")
434 | GLES30.glCompileShader(shaderId)
435 | maybeThrowGLException("Shader compilation failed", "glCompileShader")
436 | val compileStatus = IntArray(1)
437 | GLES30.glGetShaderiv(shaderId, GLES30.GL_COMPILE_STATUS, compileStatus, 0)
438 | if (compileStatus[0] == GLES30.GL_FALSE) {
439 | val infoLog = GLES30.glGetShaderInfoLog(shaderId)
440 | maybeLogGLError(
441 | Log.WARN, TAG, "Failed to retrieve shader info log", "glGetShaderInfoLog"
442 | )
443 | GLES30.glDeleteShader(shaderId)
444 | maybeLogGLError(Log.WARN, TAG, "Failed to free shader", "glDeleteShader")
445 | throw GLException(0, "Shader compilation failed: $infoLog")
446 | }
447 | return shaderId
448 | }
449 |
450 |         private fun createShaderDefinesCode(defines: Map<String, String>?): String {
451 | if (defines == null) {
452 | return ""
453 | }
454 | val builder = StringBuilder()
455 | for ((key, value) in defines) {
456 | builder.append(
457 | """#define $key $value
458 | """
459 | )
460 | }
461 | return builder.toString()
462 | }
463 |
464 | private fun insertShaderDefinesCode(sourceCode: String, definesCode: String): String {
465 | val result = sourceCode.replace(
466 | "(?m)^(\\s*#\\s*version\\s+.*)$".toRegex(), """
467 | $1
468 | ${Matcher.quoteReplacement(definesCode)}
469 | """.trimIndent()
470 | )
471 | return if (result == sourceCode) {
472 | // No #version specified, so just prepend source
473 | definesCode + sourceCode
474 | } else result
475 | }
476 |
477 | @Throws(IOException::class)
478 | private fun inputStreamToString(stream: InputStream): String {
479 | val reader = InputStreamReader(stream, StandardCharsets.UTF_8.name())
480 | val buffer = CharArray(1024 * 4)
481 | val builder = StringBuilder()
482 | var amount = 0
483 | while (reader.read(buffer).also { amount = it } != -1) {
484 | builder.append(buffer, 0, amount)
485 | }
486 | reader.close()
487 | return builder.toString()
488 | }
489 | }
490 |
491 | /**
492 | * Constructs a [Shader] given the shader code.
493 | *
494 | * @param defines A map of shader precompiler symbols to be defined with the given names and
495 | * values
496 | */
497 | init {
498 | var vertexShaderId = 0
499 | var fragmentShaderId = 0
500 | val definesCode = createShaderDefinesCode(defines)
501 | try {
502 | vertexShaderId = createShader(
503 | GLES30.GL_VERTEX_SHADER, insertShaderDefinesCode(vertexShaderCode, definesCode)
504 | )
505 | fragmentShaderId = createShader(
506 | GLES30.GL_FRAGMENT_SHADER, insertShaderDefinesCode(fragmentShaderCode, definesCode)
507 | )
508 | programId = GLES30.glCreateProgram()
509 | maybeThrowGLException("Shader program creation failed", "glCreateProgram")
510 | GLES30.glAttachShader(programId, vertexShaderId)
511 | maybeThrowGLException("Failed to attach vertex shader", "glAttachShader")
512 | GLES30.glAttachShader(programId, fragmentShaderId)
513 | maybeThrowGLException("Failed to attach fragment shader", "glAttachShader")
514 | GLES30.glLinkProgram(programId)
515 | maybeThrowGLException("Failed to link shader program", "glLinkProgram")
516 | val linkStatus = IntArray(1)
517 | GLES30.glGetProgramiv(programId, GLES30.GL_LINK_STATUS, linkStatus, 0)
518 | if (linkStatus[0] == GLES30.GL_FALSE) {
519 | val infoLog = GLES30.glGetProgramInfoLog(programId)
520 | maybeLogGLError(
521 | Log.WARN,
522 | TAG,
523 | "Failed to retrieve shader program info log",
524 | "glGetProgramInfoLog"
525 | )
526 | throw GLException(0, "Shader link failed: $infoLog")
527 | }
528 | } catch (t: Throwable) {
529 | close()
530 | throw t
531 | } finally {
532 | // Shader objects can be flagged for deletion immediately after program creation.
533 | if (vertexShaderId != 0) {
534 | GLES30.glDeleteShader(vertexShaderId)
535 | maybeLogGLError(Log.WARN, TAG, "Failed to free vertex shader", "glDeleteShader")
536 | }
537 | if (fragmentShaderId != 0) {
538 | GLES30.glDeleteShader(fragmentShaderId)
539 | maybeLogGLError(Log.WARN, TAG, "Failed to free fragment shader", "glDeleteShader")
540 | }
541 | }
542 | }
543 | }
--------------------------------------------------------------------------------
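Editor's note: a sketch of typical Shader usage on the GL thread. The shader asset names exist in this repo's assets/shaders directory; the commented-out uniform name `u_Color` is hypothetical and only works if the shader actually declares it.

fun createLabelShader(render: SampleRender): Shader =
    Shader.createFromAssets(
        render,
        "shaders/label.vert",
        "shaders/label.frag",
        /*defines=*/null
    )
        // Standard alpha blending, with depth test and depth write disabled for overlays.
        .setBlend(Shader.BlendFactor.SRC_ALPHA, Shader.BlendFactor.ONE_MINUS_SRC_ALPHA)
        .setDepthTest(false)
        .setDepthWrite(false)
        // .setVec4("u_Color", floatArrayOf(1f, 1f, 1f, 1f)) // only if the shader declares u_Color
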
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/Texture.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.graphics.Bitmap
5 | import android.graphics.BitmapFactory
6 | import android.opengl.GLES11Ext
7 | import android.opengl.GLES30
8 | import android.util.Log
9 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeLogGLError
10 | import io.intelligible.arcoremlkit.common.samplerender.GLError.maybeThrowGLException
11 | import java.io.Closeable
12 | import java.io.IOException
13 | import java.nio.ByteBuffer
14 |
15 | /** A GPU-side texture. */
16 | class Texture @JvmOverloads constructor(
17 | render: SampleRender?, /* package-private */
18 | val target: Target, wrapMode: WrapMode, useMipmaps: Boolean = /*useMipmaps=*/true
19 | ) : Closeable {
20 | val textureId = intArrayOf(0)
21 |
22 | /**
23 | * Describes the way the texture's edges are rendered.
24 | *
25 | * @see [GL_TEXTURE_WRAP_S](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glTexParameter.xhtml).
26 | */
27 | enum class WrapMode( /* package-private */
28 | val glesEnum: Int
29 | ) {
30 | CLAMP_TO_EDGE(GLES30.GL_CLAMP_TO_EDGE), MIRRORED_REPEAT(GLES30.GL_MIRRORED_REPEAT), REPEAT(
31 | GLES30.GL_REPEAT
32 | );
33 | }
34 |
35 | /**
36 | * Describes the target this texture is bound to.
37 | *
38 | * @see [glBindTexture](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glBindTexture.xhtml).
39 | */
40 | enum class Target(val glesEnum: Int) {
41 | TEXTURE_2D(GLES30.GL_TEXTURE_2D), TEXTURE_EXTERNAL_OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES), TEXTURE_CUBE_MAP(
42 | GLES30.GL_TEXTURE_CUBE_MAP
43 | );
44 | }
45 |
46 | /**
47 | * Describes the color format of the texture.
48 | *
49 | * @see [glTexImage2d](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glTexImage2D.xhtml).
50 | */
51 | enum class ColorFormat(val glesEnum: Int) {
52 | LINEAR(GLES30.GL_RGBA8), SRGB(GLES30.GL_SRGB8_ALPHA8);
53 | }
54 |
55 | override fun close() {
56 | if (textureId[0] != 0) {
57 | GLES30.glDeleteTextures(1, textureId, 0)
58 | maybeLogGLError(Log.WARN, TAG, "Failed to free texture", "glDeleteTextures")
59 | textureId[0] = 0
60 | }
61 | }
62 |
63 | /** Retrieve the native texture ID. */
64 | fun getTextureId(): Int {
65 | return textureId[0]
66 | }
67 |
68 | companion object {
69 | private val TAG = Texture::class.java.simpleName
70 |
71 | /** Create a texture from the given asset file name. */
72 | @Throws(IOException::class)
73 | fun createFromAsset(
74 | render: SampleRender,
75 | assetFileName: String?,
76 | wrapMode: WrapMode,
77 | colorFormat: ColorFormat
78 | ): Texture {
79 | val texture = Texture(render, Target.TEXTURE_2D, wrapMode)
80 | var bitmap: Bitmap? = null
81 | try {
82 | // The following lines up to glTexImage2D could technically be replaced with
83 | // GLUtils.texImage2d, but this method does not allow for loading sRGB images.
84 |
85 | // Load and convert the bitmap and copy its contents to a direct ByteBuffer. Despite its name,
86 | // the ARGB_8888 config is actually stored in RGBA order.
87 | bitmap = convertBitmapToConfig(
88 | BitmapFactory.decodeStream(render.assets.open(assetFileName!!)),
89 | Bitmap.Config.ARGB_8888
90 | )
91 | val buffer = ByteBuffer.allocateDirect(bitmap.byteCount)
92 | bitmap.copyPixelsToBuffer(buffer)
93 | buffer.rewind()
94 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture.getTextureId())
95 | maybeThrowGLException("Failed to bind texture", "glBindTexture")
96 | GLES30.glTexImage2D(
97 | GLES30.GL_TEXTURE_2D, /*level=*/
98 | 0,
99 | colorFormat.glesEnum,
100 | bitmap.width,
101 | bitmap.height, /*border=*/
102 | 0,
103 | GLES30.GL_RGBA,
104 | GLES30.GL_UNSIGNED_BYTE,
105 | buffer
106 | )
107 | maybeThrowGLException("Failed to populate texture data", "glTexImage2D")
108 | GLES30.glGenerateMipmap(GLES30.GL_TEXTURE_2D)
109 | maybeThrowGLException("Failed to generate mipmaps", "glGenerateMipmap")
110 | } catch (t: Throwable) {
111 | texture.close()
112 | throw t
113 | } finally {
114 | bitmap?.recycle()
115 | }
116 | return texture
117 | }
118 |
119 | private fun convertBitmapToConfig(bitmap: Bitmap, config: Bitmap.Config): Bitmap {
120 | // We use this method instead of BitmapFactory.Options.outConfig to support a minimum of Android
121 | // API level 24.
122 | if (bitmap.config == config) {
123 | return bitmap
124 | }
125 | val result = bitmap.copy(config, /*isMutable=*/false)
126 | bitmap.recycle()
127 | return result
128 | }
129 | }
130 |
131 | /**
132 | * Construct an empty [Texture].
133 | *
134 | *
135 | * Since [Texture]s created in this way are not populated with data, this method is
136 |      * mostly useful for creating [Target.TEXTURE_EXTERNAL_OES] textures. See [createFromAsset] if you want a texture with data.
137 | */
138 | init {
139 | GLES30.glGenTextures(1, textureId, 0)
140 | maybeThrowGLException("Texture creation failed", "glGenTextures")
141 | val minFilter = if (useMipmaps) GLES30.GL_LINEAR_MIPMAP_LINEAR else GLES30.GL_LINEAR
142 | try {
143 | GLES30.glBindTexture(target.glesEnum, textureId[0])
144 | maybeThrowGLException("Failed to bind texture", "glBindTexture")
145 | GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_MIN_FILTER, minFilter)
146 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
147 | GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR)
148 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
149 | GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_WRAP_S, wrapMode.glesEnum)
150 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
151 | GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_WRAP_T, wrapMode.glesEnum)
152 | maybeThrowGLException("Failed to set texture parameter", "glTexParameteri")
153 | } catch (t: Throwable) {
154 | close()
155 | throw t
156 | }
157 | }
158 | }
--------------------------------------------------------------------------------
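Editor's note: a sketch of loading a 2D texture with createFromAsset on the GL thread. The asset path and uniform name below are placeholders, not files or symbols shipped with this project.

fun loadAlbedoTexture(render: SampleRender): Texture =
    Texture.createFromAsset(
        render,
        "textures/example.png",          // placeholder asset path
        Texture.WrapMode.CLAMP_TO_EDGE,
        Texture.ColorFormat.SRGB
    )

// Bind it to a sampler uniform on a previously created Shader:
// shader.setTexture("u_AlbedoTexture", loadAlbedoTexture(render)) // hypothetical uniform name
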
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/VertexBuffer.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender
3 |
4 | import android.opengl.GLES30
5 | import java.io.Closeable
6 | import java.nio.FloatBuffer
7 |
8 | /**
9 | * A list of vertex attribute data stored GPU-side.
10 | *
11 | *
12 | * One or more [VertexBuffer]s are used when constructing a [Mesh] to describe vertex
13 | * attribute data; for example, local coordinates, texture coordinates, vertex normals, etc.
14 | *
15 | * @see [glVertexAttribPointer](https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glVertexAttribPointer.xhtml)
16 | */
17 | class VertexBuffer(render: SampleRender?, numberOfEntriesPerVertex: Int, entries: FloatBuffer?) :
18 | Closeable {
19 | private val buffer: GpuBuffer
20 | /* package-private */ val numberOfEntriesPerVertex: Int
21 |
22 | /**
23 | * Populate with new data.
24 | *
25 | *
26 | * The entire buffer is replaced by the contents of the *direct* buffer `entries`
27 | * starting from the beginning of the buffer, not the current cursor position. The cursor will be
28 | * left in an undefined position after this function returns.
29 | *
30 | *
31 | * The GPU buffer is reallocated automatically if necessary.
32 | *
33 | *
34 | * The `entries` buffer may be null, in which case the buffer will become empty.
35 | * Otherwise, the size of `entries` must be divisible by the number of entries per vertex
36 | * specified during construction.
37 | */
38 | fun set(entries: FloatBuffer?) {
39 | require(!(entries != null && entries.limit() % numberOfEntriesPerVertex != 0)) {
40 | ("If non-null, vertex buffer data must be divisible by the number of data points per"
41 | + " vertex")
42 | }
43 | buffer.set(entries)
44 | }
45 |
46 | override fun close() {
47 | buffer.free()
48 | }
49 |
50 | /* package-private */
51 | val bufferId: Int
52 | get() = buffer.getBufferId()
53 |
54 | /* package-private */
55 | val numberOfVertices: Int
56 | get() = buffer.size / numberOfEntriesPerVertex
57 |
58 | /**
59 | * Construct a [VertexBuffer] populated with initial data.
60 | *
61 | *
62 | * The GPU buffer will be filled with the data in the *direct* buffer `entries`,
63 | * starting from the beginning of the buffer (not the current cursor position). The cursor will be
64 | * left in an undefined position after this function returns.
65 | *
66 | *
67 | * The number of vertices in the buffer can be expressed as `entries.limit() /
68 |      * numberOfEntriesPerVertex`. Thus, the size of the buffer must be divisible by `numberOfEntriesPerVertex`.
69 | *
70 | *
71 | * The `entries` buffer may be null, in which case an empty buffer is constructed
72 | * instead.
73 | */
74 | init {
75 | require(!(entries != null && entries.limit() % numberOfEntriesPerVertex != 0)) {
76 | ("If non-null, vertex buffer data must be divisible by the number of data points per"
77 | + " vertex")
78 | }
79 | this.numberOfEntriesPerVertex = numberOfEntriesPerVertex
80 | buffer = GpuBuffer(GLES30.GL_ARRAY_BUFFER, GpuBuffer.FLOAT_SIZE, entries)
81 | }
82 | }
--------------------------------------------------------------------------------
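Editor's note: a sketch of the set() contract described above. The replacement data must live in a *direct* FloatBuffer whose limit is divisible by numberOfEntriesPerVertex (three here, for xyz points); `render` is assumed to be a live SampleRender and the class name is illustrative.

import java.nio.ByteBuffer
import java.nio.ByteOrder

class PointUploader(render: SampleRender) {
    // Starts out empty; 3 entries (x, y, z) per vertex.
    private val pointVertexBuffer =
        VertexBuffer(render, /*numberOfEntriesPerVertex=*/3, /*entries=*/null)

    fun updatePoints(xyz: FloatArray) {
        require(xyz.size % 3 == 0) { "Point data must be a multiple of 3 floats" }
        val data = ByteBuffer.allocateDirect(xyz.size * 4) // direct buffer, 4 bytes per float
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(xyz)
        data.rewind()
        pointVertexBuffer.set(data) // the GPU buffer is reallocated automatically if needed
    }
}
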
/app/src/main/java/io/intelligible/arcoremlkit/common/samplerender/arcore/BackgroundRenderer.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.common.samplerender.arcore
3 |
4 | import android.media.Image
5 | import android.opengl.GLES30
6 | import com.google.ar.core.Coordinates2d
7 | import com.google.ar.core.Frame
8 | import io.intelligible.arcoremlkit.common.samplerender.*
9 | import java.io.IOException
10 | import java.nio.ByteBuffer
11 | import java.nio.ByteOrder
12 |
13 | /**
14 |  * This class both renders the AR camera background and composites the virtual scene foreground. The camera
15 | * background can be rendered as either camera image data or camera depth data. The virtual scene
16 | * can be composited with or without depth occlusion.
17 | */
18 | class BackgroundRenderer(render: SampleRender?) {
19 | companion object {
20 | private val TAG = BackgroundRenderer::class.java.simpleName
21 |
22 | // components_per_vertex * number_of_vertices * float_size
23 | private const val COORDS_BUFFER_SIZE = 2 * 4 * 4
24 | private val NDC_QUAD_COORDS_BUFFER = ByteBuffer.allocateDirect(COORDS_BUFFER_SIZE).order(
25 | ByteOrder.nativeOrder()
26 | ).asFloatBuffer()
27 | private val VIRTUAL_SCENE_TEX_COORDS_BUFFER =
28 | ByteBuffer.allocateDirect(COORDS_BUFFER_SIZE).order(
29 | ByteOrder.nativeOrder()
30 | ).asFloatBuffer()
31 |
32 | init {
33 | NDC_QUAD_COORDS_BUFFER.put(
34 | floatArrayOf( /*0:*/
35 | -1f, -1f, /*1:*/+1f, -1f, /*2:*/-1f, +1f, /*3:*/+1f, +1f
36 | )
37 | )
38 | VIRTUAL_SCENE_TEX_COORDS_BUFFER.put(
39 | floatArrayOf( /*0:*/
40 | 0f, 0f, /*1:*/1f, 0f, /*2:*/0f, 1f, /*3:*/1f, 1f
41 | )
42 | )
43 | }
44 | }
45 |
46 | private val cameraTexCoords =
47 | ByteBuffer.allocateDirect(COORDS_BUFFER_SIZE).order(ByteOrder.nativeOrder()).asFloatBuffer()
48 | private val mesh: Mesh
49 | private val cameraTexCoordsVertexBuffer: VertexBuffer
50 | private var backgroundShader: Shader? = null
51 | private val occlusionShader: Shader? = null
52 | private val cameraDepthTexture: Texture
53 | val colorTexture: Texture
54 | private var useDepthVisualization = false
55 | private val useOcclusion = false
56 | private var aspectRatio = 0f
57 |
58 | /**
59 | * Sets whether the background camera image should be replaced with a depth visualization instead.
60 | * This reloads the corresponding shader code, and must be called on the GL thread.
61 | */
62 | @Throws(IOException::class)
63 | fun setUseDepthVisualization(render: SampleRender?, useDepthVisualization: Boolean) {
64 | if (backgroundShader != null) {
65 | if (this.useDepthVisualization == useDepthVisualization) {
66 | return
67 | }
68 | backgroundShader!!.close()
69 | backgroundShader = null
70 | this.useDepthVisualization = useDepthVisualization
71 | }
72 | backgroundShader = if (useDepthVisualization) {
73 | Shader.createFromAssets(
74 | render!!,
75 | "shaders/background_show_depth_color_visualization.vert",
76 | "shaders/background_show_depth_color_visualization.frag", /*defines=*/
77 | null
78 | )
79 | .setTexture("u_CameraDepthTexture", cameraDepthTexture)
80 | .setDepthTest(false)
81 | .setDepthWrite(false)
82 | } else {
83 | Shader.createFromAssets(
84 | render!!,
85 | "shaders/background_show_camera.vert",
86 | "shaders/background_show_camera.frag", /*defines=*/
87 | null
88 | )
89 | .setTexture("u_CameraColorTexture", colorTexture)
90 | .setDepthTest(false)
91 | .setDepthWrite(false)
92 | }
93 | }
94 |
95 | /**
96 | * Updates the display geometry. This must be called every frame before calling either of
97 | * BackgroundRenderer's draw methods.
98 | *
99 | * @param frame The current `Frame` as returned by [Session.update].
100 | */
101 | fun updateDisplayGeometry(frame: Frame) {
102 | if (frame.hasDisplayGeometryChanged()) {
103 | // If display rotation changed (also includes view size change), we need to re-query the UV
104 | // coordinates for the screen rect, as they may have changed as well.
105 | frame.transformCoordinates2d(
106 | Coordinates2d.OPENGL_NORMALIZED_DEVICE_COORDINATES,
107 | NDC_QUAD_COORDS_BUFFER,
108 | Coordinates2d.TEXTURE_NORMALIZED,
109 | cameraTexCoords
110 | )
111 | cameraTexCoordsVertexBuffer.set(cameraTexCoords)
112 | }
113 | }
114 |
115 | /** Update depth texture with Image contents. */
116 | fun updateCameraDepthTexture(image: Image) {
117 | // SampleRender abstraction leaks here
118 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, cameraDepthTexture.getTextureId())
119 | GLES30.glTexImage2D(
120 | GLES30.GL_TEXTURE_2D,
121 | 0,
122 | GLES30.GL_RG8,
123 | image.width,
124 | image.height,
125 | 0,
126 | GLES30.GL_RG,
127 | GLES30.GL_UNSIGNED_BYTE,
128 | image.planes[0].buffer
129 | )
130 | if (useOcclusion) {
131 | aspectRatio = image.width.toFloat() / image.height.toFloat()
132 | occlusionShader?.setFloat("u_DepthAspectRatio", aspectRatio)
133 | }
134 | }
135 |
136 | /**
137 | * Draws the AR background image. The image will be drawn such that virtual content rendered with
138 | * the matrices provided by [com.google.ar.core.Camera.getViewMatrix] and
139 | * [com.google.ar.core.Camera.getProjectionMatrix] will
140 | * accurately follow static physical objects.
141 | */
142 | fun drawBackground(render: SampleRender) {
143 | render.draw(mesh, backgroundShader!!)
144 | }
145 |
146 | /**
147 | * Draws the virtual scene. Any objects rendered in the given [Framebuffer] will be drawn
148 | * given the previously specified [OcclusionMode].
149 | *
150 | *
151 |      * Virtual content should be rendered using the matrices provided by [com.google.ar.core.Camera.getViewMatrix] and [com.google.ar.core.Camera.getProjectionMatrix].
152 | */
153 | fun drawVirtualScene(
154 | render: SampleRender, virtualSceneFramebuffer: Framebuffer, zNear: Float, zFar: Float
155 | ) {
156 | occlusionShader?.setTexture(
157 | "u_VirtualSceneColorTexture", virtualSceneFramebuffer.colorTexture!!
158 | )
159 | if (useOcclusion) {
160 | occlusionShader
161 | ?.setTexture("u_VirtualSceneDepthTexture", virtualSceneFramebuffer.colorTexture!!)
162 | ?.setFloat("u_ZNear", zNear)
163 | ?.setFloat("u_ZFar", zFar)
164 | }
165 | render.draw(mesh, occlusionShader!!)
166 | }
167 |
168 | /** Return the camera color texture generated by this object. */
169 | fun getCameraColorTexture(): Texture {
170 | return colorTexture
171 | }
172 |
173 | /** Return the camera depth texture generated by this object. */
174 | fun getCameraDepthTexture(): Texture {
175 | return cameraDepthTexture
176 | }
177 |
178 | /**
179 | * Allocates and initializes OpenGL resources needed by the background renderer. Must be called
180 |      * during a [SampleRender.Renderer] callback, typically in [SampleRender.Renderer.onSurfaceCreated].
181 | */
182 | init {
183 | colorTexture = Texture(
184 | render,
185 | Texture.Target.TEXTURE_EXTERNAL_OES,
186 | Texture.WrapMode.CLAMP_TO_EDGE, /*useMipmaps=*/
187 | false
188 | )
189 | cameraDepthTexture = Texture(
190 | render,
191 | Texture.Target.TEXTURE_2D,
192 | Texture.WrapMode.CLAMP_TO_EDGE, /*useMipmaps=*/
193 | false
194 | )
195 |
196 | // Create a Mesh with three vertex buffers: one for the screen coordinates (normalized device
197 | // coordinates), one for the camera texture coordinates (to be populated with proper data later
198 | // before drawing), and one for the virtual scene texture coordinates (unit texture quad)
199 | val screenCoordsVertexBuffer =
200 | VertexBuffer(render, /* numberOfEntriesPerVertex=*/2, NDC_QUAD_COORDS_BUFFER)
201 | cameraTexCoordsVertexBuffer =
202 | VertexBuffer(render, /*numberOfEntriesPerVertex=*/2, /*entries=*/null)
203 | val virtualSceneTexCoordsVertexBuffer =
204 | VertexBuffer(render, /* numberOfEntriesPerVertex=*/2, VIRTUAL_SCENE_TEX_COORDS_BUFFER)
205 |         val vertexBuffers: Array<VertexBuffer> = arrayOf(
206 | screenCoordsVertexBuffer, cameraTexCoordsVertexBuffer, virtualSceneTexCoordsVertexBuffer
207 | )
208 | mesh = Mesh(render, Mesh.PrimitiveMode.TRIANGLE_STRIP, /*indexBuffer=*/null, vertexBuffers)
209 | }
210 | }
--------------------------------------------------------------------------------
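Editor's note: a sketch of the per-frame call order this class expects inside SampleRender.Renderer.onDrawFrame. It mirrors what AppRenderer below actually does; the wrapper function is illustrative.

import com.google.ar.core.Session

fun drawCameraBackground(
    session: Session,
    backgroundRenderer: BackgroundRenderer,
    render: SampleRender
) {
    // Tell ARCore which GL texture receives the camera image.
    session.setCameraTextureNames(intArrayOf(backgroundRenderer.colorTexture.textureId[0]))
    val frame = session.update()
    backgroundRenderer.updateDisplayGeometry(frame) // must be called every frame before drawing
    backgroundRenderer.drawBackground(render)
}
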
/app/src/main/java/io/intelligible/arcoremlkit/ml/ARCoreSessionLifecycle.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml
3 |
4 | import android.app.Activity
5 | import android.widget.Toast
6 | import androidx.lifecycle.DefaultLifecycleObserver
7 | import androidx.lifecycle.LifecycleOwner
8 | import com.google.ar.core.ArCoreApk
9 | import com.google.ar.core.Session
10 |
11 | import com.google.ar.core.exceptions.CameraNotAvailableException
12 | import io.intelligible.arcoremlkit.utils.CameraPermissionHelper
13 |
14 | /**
15 | * Manages an ARCore Session using the Android Lifecycle API.
16 | * Before starting a Session, this class requests an install of ARCore, if necessary,
17 | * and asks the user for permissions, if necessary.
18 | */
19 | class ARCoreSessionLifecycleHelper(
20 | val activity: Activity,
21 |     val features: Set<Session.Feature> = setOf()
22 | ) : DefaultLifecycleObserver {
23 | var installRequested = false
24 | var sessionCache: Session? = null
25 | private set
26 |
27 |     // Creating a Session may fail. In that case, sessionCache will remain null, and this callback will be invoked with the exception.
28 | // See https://developers.google.com/ar/reference/java/com/google/ar/core/Session#Session(android.content.Context)
29 | // for more information.
30 | var exceptionCallback: ((Exception) -> Unit)? = null
31 |
32 |     // The window after a session is created, but before Session.resume is called, is the right time to set up the session.
33 | // Generally, you would use Session.configure or setCameraConfig here.
34 | // https://developers.google.com/ar/reference/java/com/google/ar/core/Session#public-void-configure-config-config
35 | // https://developers.google.com/ar/reference/java/com/google/ar/core/Session#setCameraConfig(com.google.ar.core.CameraConfig)
36 | var beforeSessionResume: ((Session) -> Unit)? = null
37 |
38 | // Creates a session. If ARCore is not installed, an installation will be requested.
39 | fun tryCreateSession(): Session? {
40 | // Request an installation if necessary.
41 | when (ArCoreApk.getInstance().requestInstall(activity, !installRequested)!!) {
42 | ArCoreApk.InstallStatus.INSTALL_REQUESTED -> {
43 | installRequested = true
44 | // tryCreateSession will be called again, so we return null for now.
45 | return null
46 | }
47 | ArCoreApk.InstallStatus.INSTALLED -> {
48 | // Left empty; nothing needs to be done
49 | }
50 | }
51 |
52 | // Create a session if ARCore is installed.
53 | return try {
54 | Session(activity, features)
55 | } catch (e: Exception) {
56 | exceptionCallback?.invoke(e)
57 | null
58 | }
59 | }
60 |
61 | override fun onResume(owner: LifecycleOwner) {
62 | if (!CameraPermissionHelper.hasCameraPermission(activity)) {
63 | CameraPermissionHelper.requestCameraPermission(activity)
64 | return
65 | }
66 |
67 | val session = tryCreateSession() ?: return
68 | try {
69 | beforeSessionResume?.invoke(session)
70 | session.resume()
71 | sessionCache = session
72 | } catch (e: CameraNotAvailableException) {
73 | exceptionCallback?.invoke(e)
74 | }
75 | }
76 |
77 | override fun onPause(owner: LifecycleOwner) {
78 | sessionCache?.pause()
79 | }
80 |
81 | override fun onDestroy(owner: LifecycleOwner) {
82 | // Explicitly close ARCore Session to release native resources.
83 | // Review the API reference for important considerations before calling close() in apps with
84 | // more complicated lifecycle requirements:
85 | // https://developers.google.com/ar/reference/java/arcore/reference/com/google/ar/core/Session#close()
86 | sessionCache?.close()
87 | sessionCache = null
88 | }
89 |
90 | fun onRequestPermissionsResult(
91 | requestCode: Int,
92 |         permissions: Array<String>,
93 | grantResults: IntArray
94 | ) {
95 | if (!CameraPermissionHelper.hasCameraPermission(activity)) {
96 | Toast.makeText(activity, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
97 | .show()
98 | if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(activity)) {
99 |                 // Permission was denied with "Do not ask again" checked.
100 | CameraPermissionHelper.launchPermissionSettings(activity)
101 | }
102 | activity.finish()
103 | }
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
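Editor's note: a sketch of how an Activity registers this helper, mirroring MainActivity further down. The extension-function name and Log tag are illustrative.

import android.util.Log
import androidx.appcompat.app.AppCompatActivity
import com.google.ar.core.Config

fun AppCompatActivity.installArSessionHelper(): ARCoreSessionLifecycleHelper {
    val helper = ARCoreSessionLifecycleHelper(this)
    helper.exceptionCallback = { e ->
        Log.e("ARSession", "Failed to create or resume the ARCore session", e)
    }
    helper.beforeSessionResume = { session ->
        // Configure before resume, e.g. enable autofocus (as MainActivity does below).
        session.configure(session.config.apply { focusMode = Config.FocusMode.AUTO })
    }
    lifecycle.addObserver(helper)
    return helper
}
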
/app/src/main/java/io/intelligible/arcoremlkit/ml/AppRenderer.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml
3 |
4 | import android.opengl.Matrix
5 | import android.util.Log
6 | import androidx.lifecycle.DefaultLifecycleObserver
7 | import androidx.lifecycle.LifecycleOwner
8 | import com.google.ar.core.Anchor
9 | import com.google.ar.core.Coordinates2d
10 | import com.google.ar.core.Frame
11 | import com.google.ar.core.TrackingState
12 |
13 | import io.intelligible.arcoremlkit.ml.classification.DetectedObjectResult
14 | import io.intelligible.arcoremlkit.ml.classification.GoogleCloudVisionDetector
15 | import io.intelligible.arcoremlkit.ml.classification.MLKitObjectDetector
16 | import io.intelligible.arcoremlkit.ml.classification.ObjectDetector
17 | import io.intelligible.arcoremlkit.ml.render.LabelRender
18 | import io.intelligible.arcoremlkit.ml.render.PointCloudRender
19 | import com.google.ar.core.exceptions.CameraNotAvailableException
20 | import com.google.ar.core.exceptions.NotYetAvailableException
21 | import io.intelligible.arcoremlkit.common.helpers.DisplayRotationHelper
22 | import io.intelligible.arcoremlkit.common.samplerender.SampleRender
23 | import io.intelligible.arcoremlkit.common.samplerender.arcore.BackgroundRenderer
24 | import java.util.Collections
25 | import kotlinx.coroutines.CoroutineScope
26 | import kotlinx.coroutines.Dispatchers
27 | import kotlinx.coroutines.MainScope
28 | import kotlinx.coroutines.launch
29 |
30 |
31 | /**
32 |  * Renders the HelloAR application using our example Renderer.
33 | */
34 | class AppRenderer(val activity: MainActivity) : DefaultLifecycleObserver, SampleRender.Renderer, CoroutineScope by MainScope() {
35 | companion object {
36 | val TAG = "HelloArRenderer"
37 | }
38 |
39 | lateinit var view: MainActivityView
40 |
41 | val displayRotationHelper = DisplayRotationHelper(activity)
42 | lateinit var backgroundRenderer: BackgroundRenderer
43 | private val pointCloudRender = PointCloudRender()
44 | val labelRenderer = LabelRender()
45 |
46 | val viewMatrix = FloatArray(16)
47 | val projectionMatrix = FloatArray(16)
48 | val viewProjectionMatrix = FloatArray(16)
49 |
50 |     val arLabeledAnchors = Collections.synchronizedList(mutableListOf<ARLabeledAnchor>())
51 | var scanButtonWasPressed = false
52 |
53 | val mlKitAnalyzer = MLKitObjectDetector(activity)
54 | val gcpAnalyzer = GoogleCloudVisionDetector(activity)
55 |
56 | var currentAnalyzer: ObjectDetector = gcpAnalyzer
57 |
58 | override fun onResume(owner: LifecycleOwner) {
59 | displayRotationHelper.onResume()
60 | }
61 |
62 | override fun onPause(owner: LifecycleOwner) {
63 | displayRotationHelper.onPause()
64 | }
65 |
66 | fun bindView(view: MainActivityView) {
67 | this.view = view
68 |
69 | view.scanButton.setOnClickListener {
70 | // frame.acquireCameraImage is dependent on an ARCore Frame, which is only available in onDrawFrame.
71 | // Use a boolean and check its state in onDrawFrame to interact with the camera image.
72 | scanButtonWasPressed = true
73 | view.setScanningActive(true)
74 | hideSnackbar()
75 | }
76 |
77 | view.useCloudMlSwitch.setOnCheckedChangeListener { _, isChecked ->
78 | currentAnalyzer = if (isChecked) gcpAnalyzer else mlKitAnalyzer
79 | }
80 |
81 | val gcpConfigured = gcpAnalyzer.credentials != null
82 | view.useCloudMlSwitch.isChecked = gcpConfigured
83 | view.useCloudMlSwitch.isEnabled = gcpConfigured
84 | currentAnalyzer = if (gcpConfigured) gcpAnalyzer else mlKitAnalyzer
85 |
86 | if (!gcpConfigured) {
87 | showSnackbar("Google Cloud Vision isn't configured (see README). The Cloud ML switch will be disabled.")
88 | }
89 |
90 | view.resetButton.setOnClickListener {
91 | arLabeledAnchors.clear()
92 | view.resetButton.isEnabled = false
93 | hideSnackbar()
94 | }
95 | }
96 |
97 | override fun onSurfaceCreated(render: SampleRender?) {
98 | backgroundRenderer = BackgroundRenderer(render).apply {
99 | setUseDepthVisualization(render, false)
100 | }
101 | pointCloudRender.onSurfaceCreated(render!!)
102 | labelRenderer.onSurfaceCreated(render)
103 | }
104 |
105 | override fun onSurfaceChanged(render: SampleRender?, width: Int, height: Int) {
106 | displayRotationHelper.onSurfaceChanged(width, height)
107 | }
108 |
109 |     var objectResults: List<DetectedObjectResult>? = null
110 |
111 | override fun onDrawFrame(render: SampleRender?) {
112 | val session = activity.arCoreSessionHelper.sessionCache ?: return
113 | session.setCameraTextureNames(intArrayOf(backgroundRenderer.colorTexture.textureId[0]))
114 |
115 | // Notify ARCore session that the view size changed so that the perspective matrix and
116 | // the video background can be properly adjusted.
117 | displayRotationHelper.updateSessionIfNeeded(session)
118 |
119 | val frame = try {
120 | session.update()
121 | } catch (e: CameraNotAvailableException) {
122 | Log.e(TAG, "Camera not available during onDrawFrame", e)
123 | showSnackbar("Camera not available. Try restarting the app.")
124 | return
125 | }
126 |
127 | backgroundRenderer.updateDisplayGeometry(frame)
128 | backgroundRenderer.drawBackground(render!!)
129 |
130 | // Get camera and projection matrices.
131 | val camera = frame.camera
132 | camera.getViewMatrix(viewMatrix, 0)
133 | camera.getProjectionMatrix(projectionMatrix, 0, 0.01f, 100.0f)
134 | Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0)
135 |
136 | // Handle tracking failures.
137 | if (camera.trackingState != TrackingState.TRACKING) {
138 | return
139 | }
140 |
141 | // Draw point cloud.
142 | frame.acquirePointCloud().use { pointCloud ->
143 | pointCloudRender.drawPointCloud(render, pointCloud, viewProjectionMatrix)
144 | }
145 |
146 | // Frame.acquireCameraImage must be used on the GL thread.
147 | // Check if the button was pressed last frame to start processing the camera image.
148 | if (scanButtonWasPressed) {
149 | scanButtonWasPressed = false
150 | val cameraImage = frame.tryAcquireCameraImage()
151 | if (cameraImage != null) {
152 | // Call our ML model on an IO thread.
153 | launch(Dispatchers.IO) {
154 | val cameraId = session.cameraConfig.cameraId
155 | val imageRotation = displayRotationHelper.getCameraSensorToDisplayRotation(cameraId)
156 | objectResults = currentAnalyzer.analyze(cameraImage, imageRotation)
157 | cameraImage.close()
158 | }
159 | }
160 | }
161 |
162 | /** If results were completed this frame, create [Anchor]s from model results. */
163 | val objects = objectResults
164 | if (objects != null) {
165 | objectResults = null
166 | Log.e(TAG, "$currentAnalyzer got objects: $objects")
167 | val anchors = objects.mapNotNull { obj ->
168 | obj.label
169 | val (atX, atY) = obj.centerCoordinate
170 | val anchor = createAnchor(atX.toFloat(), atY.toFloat(), frame) ?: return@mapNotNull null
171 | Log.i(TAG, "Created anchor ${anchor.pose} from hit test")
172 | ARLabeledAnchor(anchor, obj.label)
173 | }
174 | arLabeledAnchors.addAll(anchors)
175 | view.post {
176 | view.resetButton.isEnabled = arLabeledAnchors.isNotEmpty()
177 | view.setScanningActive(false)
178 | when {
179 | objects.isEmpty() && currentAnalyzer == mlKitAnalyzer && !mlKitAnalyzer.hasCustomModel() ->
180 | showSnackbar("Default ML Kit classification model returned no results. " +
181 | "For better classification performance, see the README to configure a custom model.")
182 | objects.isEmpty() ->
183 | showSnackbar("Classification model returned no results.")
184 | anchors.size != objects.size ->
185 | showSnackbar("Objects were classified, but could not be attached to an anchor. " +
186 | "Try moving your device around to obtain a better understanding of the environment.")
187 | }
188 | }
189 | }
190 |
191 | // Draw labels at their anchor position.
192 | for (arDetectedObject in arLabeledAnchors) {
193 | val anchor = arDetectedObject.anchor
194 | if (anchor.trackingState != TrackingState.TRACKING) continue
195 | labelRenderer.draw(
196 | render,
197 | viewProjectionMatrix,
198 | anchor.pose,
199 | camera.pose,
200 | arDetectedObject.label
201 | )
202 | }
203 | }
204 |
205 | /**
206 | * Utility method for [Frame.acquireCameraImage] that maps [NotYetAvailableException] to `null`.
207 | */
208 | fun Frame.tryAcquireCameraImage() = try {
209 | acquireCameraImage()
210 | } catch (e: NotYetAvailableException) {
211 | null
212 | } catch (e: Throwable) {
213 | throw e
214 | }
215 |
216 | private fun showSnackbar(message: String): Unit =
217 | activity.view.snackbarHelper.showError(activity, message)
218 |
219 | private fun hideSnackbar() = activity.view.snackbarHelper.hide(activity)
220 |
221 | /**
222 | * Temporary arrays to prevent allocations in [createAnchor].
223 | */
224 | private val convertFloats = FloatArray(4)
225 | private val convertFloatsOut = FloatArray(4)
226 |
227 | /** Create an anchor using (x, y) coordinates in the [Coordinates2d.IMAGE_PIXELS] coordinate space. */
228 | fun createAnchor(xImage: Float, yImage: Float, frame: Frame): Anchor? {
229 | // IMAGE_PIXELS -> VIEW
230 | convertFloats[0] = xImage
231 | convertFloats[1] = yImage
232 | frame.transformCoordinates2d(
233 | Coordinates2d.IMAGE_PIXELS,
234 | convertFloats,
235 | Coordinates2d.VIEW,
236 | convertFloatsOut
237 | )
238 |
239 | // Conduct a hit test using the VIEW coordinates
240 | val hits = frame.hitTest(convertFloatsOut[0], convertFloatsOut[1])
241 | val result = hits.getOrNull(0) ?: return null
242 | return result.trackable.createAnchor(result.hitPose)
243 | }
244 | }
245 |
246 | data class ARLabeledAnchor(val anchor: Anchor, val label: String)
--------------------------------------------------------------------------------
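For context, the viewProjectionMatrix consumed by drawPointCloud and labelRenderer.draw above is computed earlier in AppRenderer.onDrawFrame (outside this excerpt). The sketch below shows how such a matrix is typically composed from the ARCore Camera; the helper name and the near/far plane values are illustrative, not part of this project.

    import android.opengl.Matrix
    import com.google.ar.core.Camera

    // Sketch: compose a view-projection matrix from an ARCore Camera.
    // Near/far planes (0.01 m / 100 m) are illustrative values.
    fun computeViewProjection(camera: Camera): FloatArray {
        val viewMatrix = FloatArray(16)
        val projectionMatrix = FloatArray(16)
        val viewProjectionMatrix = FloatArray(16)
        camera.getViewMatrix(viewMatrix, 0)                            // world -> eye
        camera.getProjectionMatrix(projectionMatrix, 0, 0.01f, 100.0f) // eye -> clip
        Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0)
        return viewProjectionMatrix
    }
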
/app/src/main/java/io/intelligible/arcoremlkit/ml/MainActivity.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml
3 |
4 | import android.os.Bundle
5 | import android.util.Log
6 | import android.widget.Toast
7 | import androidx.appcompat.app.AppCompatActivity
8 | import com.google.ar.core.CameraConfig
9 | import com.google.ar.core.CameraConfigFilter
10 | import com.google.ar.core.Config
11 |
12 | import com.google.ar.core.exceptions.CameraNotAvailableException
13 | import com.google.ar.core.exceptions.UnavailableApkTooOldException
14 | import com.google.ar.core.exceptions.UnavailableArcoreNotInstalledException
15 | import com.google.ar.core.exceptions.UnavailableDeviceNotCompatibleException
16 | import com.google.ar.core.exceptions.UnavailableSdkTooOldException
17 | import com.google.ar.core.exceptions.UnavailableUserDeclinedInstallationException
18 | import io.intelligible.arcoremlkit.common.helpers.FullScreenHelper
19 |
20 |
21 | class MainActivity : AppCompatActivity() {
22 | val TAG = "MainActivity"
23 | lateinit var arCoreSessionHelper: ARCoreSessionLifecycleHelper
24 |
25 | lateinit var renderer: AppRenderer
26 | lateinit var view: MainActivityView
27 |
28 | override fun onCreate(savedInstanceState: Bundle?) {
29 | super.onCreate(savedInstanceState)
30 |
31 | arCoreSessionHelper = ARCoreSessionLifecycleHelper(this)
32 | // When session creation or session.resume fails, we display a message and log detailed information.
33 | arCoreSessionHelper.exceptionCallback = { exception ->
34 | val message = when (exception) {
35 | is UnavailableArcoreNotInstalledException,
36 | is UnavailableUserDeclinedInstallationException -> "Please install ARCore"
37 | is UnavailableApkTooOldException -> "Please update ARCore"
38 | is UnavailableSdkTooOldException -> "Please update this app"
39 | is UnavailableDeviceNotCompatibleException -> "This device does not support AR"
40 | is CameraNotAvailableException -> "Camera not available. Try restarting the app."
41 | else -> "Failed to create AR session: $exception"
42 | }
43 | Log.e(TAG, message, exception)
44 | Toast.makeText(this, message, Toast.LENGTH_LONG).show()
45 | }
46 |
47 | arCoreSessionHelper.beforeSessionResume = { session ->
48 | session.configure(
49 | session.config.apply {
50 | // To get the best image of the object in question, enable autofocus.
51 | focusMode = Config.FocusMode.AUTO
52 | if (session.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
53 | depthMode = Config.DepthMode.AUTOMATIC
54 | }
55 | }
56 | )
57 |
58 | val filter = CameraConfigFilter(session)
59 | .setFacingDirection(CameraConfig.FacingDirection.BACK)
60 | val configs = session.getSupportedCameraConfigs(filter)
61 | val sort = compareByDescending<CameraConfig> { it.imageSize.width }
62 | .thenByDescending { it.imageSize.height }
63 | session.cameraConfig = configs.sortedWith(sort)[0]
64 | }
65 | lifecycle.addObserver(arCoreSessionHelper)
66 |
67 | renderer = AppRenderer(this)
68 | lifecycle.addObserver(renderer)
69 | view = MainActivityView(this, renderer)
70 | setContentView(view.root)
71 | renderer.bindView(view)
72 | lifecycle.addObserver(view)
73 | }
74 |
75 | override fun onRequestPermissionsResult(
76 | requestCode: Int,
77 | permissions: Array<String>,
78 | grantResults: IntArray
79 | ) {
80 | super.onRequestPermissionsResult(requestCode, permissions, grantResults)
81 | arCoreSessionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults)
82 | }
83 |
84 | override fun onWindowFocusChanged(hasFocus: Boolean) {
85 | super.onWindowFocusChanged(hasFocus)
86 | FullScreenHelper.setFullScreenOnWindowFocusChanged(this, hasFocus)
87 | }
88 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/ml/MainActivityView.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml
3 |
4 | import android.opengl.GLSurfaceView
5 | import android.view.View
6 | import android.widget.TextView
7 | import androidx.appcompat.widget.AppCompatButton
8 | import androidx.appcompat.widget.SwitchCompat
9 | import androidx.cardview.widget.CardView
10 | import androidx.lifecycle.DefaultLifecycleObserver
11 | import androidx.lifecycle.LifecycleOwner
12 | import io.intelligible.arcoremlkit.R
13 | import io.intelligible.arcoremlkit.common.helpers.SnackbarHelper
14 | import io.intelligible.arcoremlkit.common.samplerender.SampleRender
15 |
16 | /**
17 | * Wraps [R.layout.activity_main] and controls lifecycle operations for [GLSurfaceView].
18 | */
19 | class MainActivityView(val activity: MainActivity, renderer: AppRenderer) : DefaultLifecycleObserver {
20 | val root = View.inflate(activity, R.layout.activity_main, null)
21 | val surfaceView = root.findViewById<GLSurfaceView>(R.id.surfaceview).apply {
22 | SampleRender(this, renderer, activity.assets)
23 | }
24 | val useCloudMlSwitch = root.findViewById<SwitchCompat>(R.id.useCloudMlSwitch)
25 | val scanButton = root.findViewById<AppCompatButton>(R.id.scanButton)
26 | val resetButton = root.findViewById<AppCompatButton>(R.id.clearButton)
27 | val snackbarHelper = SnackbarHelper().apply {
28 | setParentView(root.findViewById(R.id.coordinatorLayout))
29 | setMaxLines(6)
30 | }
31 |
32 | override fun onResume(owner: LifecycleOwner) {
33 | surfaceView.onResume()
34 | }
35 |
36 | override fun onPause(owner: LifecycleOwner) {
37 | surfaceView.onPause()
38 | }
39 |
40 | fun post(action: Runnable) = root.post(action)
41 |
42 | /**
43 | * Toggles the scan button depending on whether scanning is in progress.
44 | */
45 | fun setScanningActive(active: Boolean) = when(active) {
46 | true -> {
47 | scanButton.isEnabled = false
48 | scanButton.setText("Scanning")
49 | }
50 | false -> {
51 | scanButton.isEnabled = true
52 | scanButton.setText("Scan")
53 | }
54 | }
55 | }
--------------------------------------------------------------------------------
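The scan and reset buttons exposed here are wired up in AppRenderer.bindView (outside this excerpt). A hedged sketch of what that wiring might look like; the callback parameters are placeholders, not the project's actual signatures.

    import io.intelligible.arcoremlkit.ml.MainActivityView

    // Hypothetical wiring; the real listeners live in AppRenderer.bindView.
    fun wireButtons(view: MainActivityView, onScan: () -> Unit, onReset: () -> Unit) {
        view.scanButton.setOnClickListener {
            view.setScanningActive(true) // disable the button and show the scanning state
            onScan()                     // e.g. flag the renderer to analyze the next frame
        }
        view.resetButton.setOnClickListener { onReset() }
    }
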
/app/src/main/java/io/intelligible/arcoremlkit/ml/classification/DetectedObjectResult.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.classification
3 |
4 | /**
5 | * A [DetectedObjectResult] describes a single result in a labeled image.
6 | * @property confidence The model's reported confidence for this inference result (normalized over `[0, 1]`).
7 | * @property label The model's reported label for this result.
8 | * @property centerCoordinate A point on the image that best describes the object's location.
9 | */
10 | data class DetectedObjectResult(
11 | val confidence: Float,
12 | val label: String,
13 | val centerCoordinate: Pair<Int, Int>
14 | )
--------------------------------------------------------------------------------
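A small usage sketch for this data class: destructuring a result and filtering by confidence (the 0.5 threshold is arbitrary).

    import io.intelligible.arcoremlkit.ml.classification.DetectedObjectResult

    fun describe(results: List<DetectedObjectResult>): List<String> =
        results.filter { it.confidence >= 0.5f } // arbitrary confidence cut-off
            .map { (confidence, label, center) ->
                val (x, y) = center
                "$label (${(confidence * 100).toInt()}%) at pixel ($x, $y)"
            }
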
/app/src/main/java/io/intelligible/arcoremlkit/ml/classification/GoogleCloudVisionDetector.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.classification
3 |
4 | import android.media.Image
5 | import android.util.Log
6 | import io.intelligible.arcoremlkit.ml.MainActivity
7 | import io.intelligible.arcoremlkit.ml.classification.utils.ImageUtils
8 | import io.intelligible.arcoremlkit.ml.classification.utils.ImageUtils.toByteArray
9 | import io.intelligible.arcoremlkit.ml.classification.utils.VertexUtils.calculateAverage
10 | import io.intelligible.arcoremlkit.ml.classification.utils.VertexUtils.rotateCoordinates
11 | import io.intelligible.arcoremlkit.ml.classification.utils.VertexUtils.toAbsoluteCoordinates
12 | import com.google.auth.oauth2.GoogleCredentials
13 | import com.google.cloud.vision.v1.AnnotateImageRequest
14 | import com.google.cloud.vision.v1.Feature
15 | import com.google.cloud.vision.v1.ImageAnnotatorClient
16 | import com.google.cloud.vision.v1.ImageAnnotatorSettings
17 | import com.google.protobuf.ByteString
18 | import com.google.cloud.vision.v1.Image as GCVImage
19 |
20 | /**
21 | * https://cloud.google.com/vision/docs/object-localizer
22 | *
23 | * Finds detected objects ([DetectedObjectResult]s) given an [android.media.Image].
24 | */
25 | class GoogleCloudVisionDetector(val activity: MainActivity) : ObjectDetector(activity) {
26 | companion object {
27 | val TAG = "GoogleCloudVisionDetector"
28 | }
29 |
30 | val credentials = try {
31 | // Providing GCP credentials is not mandatory for this app, so the existence of R.raw.credentials
32 | // is not guaranteed. Instead, use getIdentifier to determine an optional resource.
33 | val res = activity.resources.getIdentifier("credentials", "raw", activity.packageName)
34 | if (res == 0) error("Missing GCP credentials in res/raw/credentials.json.")
35 | GoogleCredentials.fromStream(activity.resources.openRawResource(res))
36 | } catch (e: Exception) {
37 | Log.e(TAG, "Unable to create Google credentials from res/raw/credentials.json. Cloud ML will be disabled.", e)
38 | null
39 | }
40 | val settings = ImageAnnotatorSettings.newBuilder().setCredentialsProvider { credentials }.build()
41 | val vision = ImageAnnotatorClient.create(settings)
42 |
43 | override suspend fun analyze(image: Image, imageRotation: Int): List<DetectedObjectResult> {
44 | // `image` is in YUV (https://developers.google.com/ar/reference/java/com/google/ar/core/Frame#acquireCameraImage()), so convert it to an RGB Bitmap first.
45 | val convertYuv = convertYuv(image)
46 |
47 | // The model performs best on upright images, so rotate it.
48 | val rotatedImage = ImageUtils.rotateBitmap(convertYuv, imageRotation)
49 |
50 | // Perform request on Google Cloud Vision APIs.
51 | val request = createAnnotateImageRequest(rotatedImage.toByteArray())
52 | val response = vision.batchAnnotateImages(listOf(request))
53 |
54 | // Process result and map to DetectedObjectResult.
55 | val objectAnnotationsResult = response.responsesList.first().localizedObjectAnnotationsList
56 | return objectAnnotationsResult.map {
57 | val center = it.boundingPoly.normalizedVerticesList.calculateAverage()
58 | val absoluteCoordinates = center.toAbsoluteCoordinates(rotatedImage.width, rotatedImage.height)
59 | val rotatedCoordinates = absoluteCoordinates.rotateCoordinates(rotatedImage.width, rotatedImage.height, imageRotation)
60 | DetectedObjectResult(it.score, it.name, rotatedCoordinates)
61 | }
62 | }
63 |
64 | /**
65 | * Creates an [AnnotateImageRequest] from image's byte array.
66 | *
67 | * https://cloud.google.com/vision/docs/reference/rest/v1/AnnotateImageRequest
68 | */
69 | private fun createAnnotateImageRequest(imageBytes: ByteArray): AnnotateImageRequest {
70 | // GCVImage is a typealias for com.google.cloud.vision's Image, needed to differentiate from android.media.Image
71 | val image = GCVImage.newBuilder().setContent(ByteString.copyFrom(imageBytes))
72 | val features = Feature.newBuilder().setType(Feature.Type.OBJECT_LOCALIZATION)
73 | return AnnotateImageRequest.newBuilder()
74 | .setImage(image)
75 | .addFeatures(features)
76 | .build()
77 | }
78 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/ml/classification/MLKitObjectDetector.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.classification
3 |
4 | import android.app.Activity
5 | import android.media.Image
6 | import io.intelligible.arcoremlkit.ml.classification.utils.ImageUtils
7 | import io.intelligible.arcoremlkit.ml.classification.utils.VertexUtils.rotateCoordinates
8 | import com.google.mlkit.common.model.LocalModel
9 | import com.google.mlkit.vision.common.InputImage
10 | import com.google.mlkit.vision.objects.ObjectDetection
11 | import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions
12 | import kotlinx.coroutines.tasks.asDeferred
13 |
14 | /**
15 | * Analyzes an image using ML Kit.
16 | */
17 | class MLKitObjectDetector(context: Activity) : ObjectDetector(context) {
18 | // To use a custom model, follow steps on https://developers.google.com/ml-kit/vision/object-detection/custom-models/android.
19 | val model = LocalModel.Builder().setAssetFilePath("model.tflite").build()
20 | val builder = CustomObjectDetectorOptions.Builder(model)
21 |
22 | // For the ML Kit default model, use the following:
23 | // val builder = ObjectDetectorOptions.Builder()
24 |
25 | private val options = builder
26 | .setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
27 | .enableClassification()
28 | .enableMultipleObjects()
29 | .build()
30 | private val detector = ObjectDetection.getClient(options)
31 |
32 | override suspend fun analyze(image: Image, imageRotation: Int): List<DetectedObjectResult> {
33 | // `image` is in YUV (https://developers.google.com/ar/reference/java/com/google/ar/core/Frame#acquireCameraImage()), so convert it to an RGB Bitmap first.
34 | val convertYuv = convertYuv(image)
35 |
36 | // The model performs best on upright images, so rotate it.
37 | val rotatedImage = ImageUtils.rotateBitmap(convertYuv, imageRotation)
38 |
39 | val inputImage = InputImage.fromBitmap(rotatedImage, 0)
40 |
41 | val mlKitDetectedObjects = detector.process(inputImage).asDeferred().await()
42 | return mlKitDetectedObjects.mapNotNull { obj ->
43 | val bestLabel = obj.labels.maxByOrNull { label -> label.confidence } ?: return@mapNotNull null
44 | val coords = obj.boundingBox.exactCenterX().toInt() to obj.boundingBox.exactCenterY().toInt()
45 | val rotatedCoordinates = coords.rotateCoordinates(rotatedImage.width, rotatedImage.height, imageRotation)
46 | DetectedObjectResult(bestLabel.confidence, bestLabel.text, rotatedCoordinates)
47 | }
48 | }
49 |
50 | @Suppress("USELESS_IS_CHECK")
51 | fun hasCustomModel() = builder is CustomObjectDetectorOptions.Builder
52 | }
--------------------------------------------------------------------------------
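As the comment in the class above notes, the bundled model.tflite can be swapped for ML Kit's base object detector. A sketch of that variant, using ML Kit's published defaults API:

    import com.google.mlkit.vision.objects.ObjectDetection
    import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions

    // Base-model variant: no LocalModel or bundled .tflite file required.
    val defaultOptions = ObjectDetectorOptions.Builder()
        .setDetectorMode(ObjectDetectorOptions.SINGLE_IMAGE_MODE)
        .enableClassification()
        .enableMultipleObjects()
        .build()
    val defaultDetector = ObjectDetection.getClient(defaultOptions)
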
/app/src/main/java/io/intelligible/arcoremlkit/ml/classification/ObjectDetector.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.classification
3 |
4 | import android.content.Context
5 | import android.graphics.Bitmap
6 | import android.media.Image
7 | import io.intelligible.arcoremlkit.YuvToRgbConverter
8 | import com.google.ar.core.Frame
9 |
10 | /**
11 | * Describes a common interface for [GoogleCloudVisionDetector] and [MLKitObjectDetector] that can
12 | * infer object labels in a given [Image] and gives results in a list of [DetectedObjectResult].
13 | */
14 | abstract class ObjectDetector(val context: Context) {
15 | val yuvConverter = YuvToRgbConverter(context)
16 |
17 | /**
18 | * Infers a list of [DetectedObjectResult] given a camera image frame, which contains a confidence level,
19 | * a label, and a pixel coordinate on the image which is believed to be the center of the object.
20 | */
21 | abstract suspend fun analyze(image: Image, imageRotation: Int): List<DetectedObjectResult>
22 |
23 | /**
24 | * [Frame.acquireCameraImage] returns an image in YUV format.
25 | * https://developers.google.com/ar/reference/java/com/google/ar/core/Frame#acquireCameraImage()
26 | *
27 | * Converts a YUV image to a [Bitmap] using [YuvToRgbConverter].
28 | */
29 | fun convertYuv(image: Image): Bitmap {
30 | return Bitmap.createBitmap(image.width, image.height, Bitmap.Config.ARGB_8888).apply {
31 | yuvConverter.yuvToRgb(image, this)
32 | }
33 | }
34 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/ml/classification/utils/ImageUtils.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.classification.utils
3 |
4 | import android.graphics.Bitmap
5 | import android.graphics.Matrix
6 | import java.io.ByteArrayOutputStream
7 |
8 | object ImageUtils {
9 | /**
10 | * Creates a new [Bitmap] by rotating the input bitmap [rotation] degrees.
11 | * If [rotation] is 0, the input bitmap is returned.
12 | */
13 | fun rotateBitmap(bitmap: Bitmap, rotation: Int): Bitmap {
14 | if (rotation == 0) return bitmap
15 |
16 | val matrix = Matrix()
17 | matrix.postRotate(rotation.toFloat())
18 | return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, false)
19 | }
20 |
21 |
22 | /**
23 | * Converts a [Bitmap] to [ByteArray] using [Bitmap.compress].
24 | */
25 | fun Bitmap.toByteArray(): ByteArray = ByteArrayOutputStream().use { stream ->
26 | this.compress(Bitmap.CompressFormat.JPEG, 100, stream)
27 | stream.toByteArray()
28 | }
29 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/ml/classification/utils/VertexUtils.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.classification.utils
3 |
4 | import com.google.cloud.vision.v1.NormalizedVertex
5 |
6 | object VertexUtils {
7 | /**
8 | * Convert a [NormalizedVertex] to an absolute coordinate pair.
9 | */
10 | fun NormalizedVertex.toAbsoluteCoordinates(
11 | imageWidth: Int,
12 | imageHeight: Int,
13 | ): Pair<Int, Int> {
14 | return (x * imageWidth).toInt() to (y * imageHeight).toInt()
15 | }
16 |
17 | /**
18 | * Rotates a coordinate pair according to [imageRotation].
19 | */
20 | fun Pair<Int, Int>.rotateCoordinates(
21 | imageWidth: Int,
22 | imageHeight: Int,
23 | imageRotation: Int,
24 | ): Pair<Int, Int> {
25 | val (x, y) = this
26 | return when (imageRotation) {
27 | 0 -> x to y
28 | 180 -> imageWidth - x to imageHeight - y
29 | 90 -> y to imageWidth - x
30 | 270 -> imageHeight - y to x
31 | else -> error("Invalid imageRotation $imageRotation")
32 | }
33 | }
34 |
35 | /**
36 | * Calculate a point using the average of points in the bounding polygon.
37 | */
38 | fun List<NormalizedVertex>.calculateAverage(): NormalizedVertex {
39 | var averageX = 0f
40 | var averageY = 0f
41 | for (vertex in this) {
42 | averageX += vertex.x / size
43 | averageY += vertex.y / size
44 | }
45 | return NormalizedVertex.newBuilder().setX(averageX).setY(averageY).build()
46 | }
47 |
48 | }
--------------------------------------------------------------------------------
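A quick worked example of rotateCoordinates, assuming a 640x480 image and a 90-degree rotation:

    import io.intelligible.arcoremlkit.ml.classification.utils.VertexUtils.rotateCoordinates

    fun rotationExample() {
        // The 90-degree branch maps (x, y) -> (y, imageWidth - x): (10, 20) -> (20, 630).
        val rotated = (10 to 20).rotateCoordinates(imageWidth = 640, imageHeight = 480, imageRotation = 90)
        check(rotated == Pair(20, 630))
    }
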
/app/src/main/java/io/intelligible/arcoremlkit/ml/render/LabelRender.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.render
3 |
4 | import com.google.ar.core.Pose
5 | import io.intelligible.arcoremlkit.common.samplerender.Mesh
6 | import io.intelligible.arcoremlkit.common.samplerender.SampleRender
7 | import io.intelligible.arcoremlkit.common.samplerender.Shader
8 | import io.intelligible.arcoremlkit.common.samplerender.VertexBuffer
9 | import java.nio.ByteBuffer
10 | import java.nio.ByteOrder
11 |
12 | /**
13 | * Draws a label. See [draw].
14 | */
15 | class LabelRender {
16 | companion object {
17 | private const val TAG = "LabelRender"
18 | val COORDS_BUFFER_SIZE = 2 * 4 * 4
19 |
20 | /**
21 | * Vertex buffer data for the mesh quad.
22 | */
23 | val NDC_QUAD_COORDS_BUFFER =
24 | ByteBuffer.allocateDirect(COORDS_BUFFER_SIZE).order(
25 | ByteOrder.nativeOrder()
26 | ).asFloatBuffer().apply {
27 | put(
28 | floatArrayOf(
29 | /*0:*/
30 | -1.5f, -1.5f,
31 | /*1:*/
32 | 1.5f, -1.5f,
33 | /*2:*/
34 | -1.5f, 1.5f,
35 | /*3:*/
36 | 1.5f, 1.5f,
37 | )
38 | )
39 | }
40 |
41 | /**
42 | * Vertex buffer data for texture coordinates.
43 | */
44 | val SQUARE_TEX_COORDS_BUFFER =
45 | ByteBuffer.allocateDirect(COORDS_BUFFER_SIZE).order(
46 | ByteOrder.nativeOrder()
47 | ).asFloatBuffer().apply {
48 | put(
49 | floatArrayOf(
50 | /*0:*/
51 | 0f, 0f,
52 | /*1:*/
53 | 1f, 0f,
54 | /*2:*/
55 | 0f, 1f,
56 | /*3:*/
57 | 1f, 1f,
58 | )
59 | )
60 | }
61 | }
62 |
63 | val cache = TextTextureCache()
64 |
65 | lateinit var mesh: Mesh
66 | lateinit var shader: Shader
67 |
68 | fun onSurfaceCreated(render: SampleRender) {
69 | shader = Shader.createFromAssets(render, "shaders/label.vert", "shaders/label.frag", null)
70 | .setBlend(
71 | Shader.BlendFactor.ONE, // ALPHA (src)
72 | Shader.BlendFactor.ONE_MINUS_SRC_ALPHA // ALPHA (dest)
73 | )
74 | .setDepthTest(false)
75 | .setDepthWrite(false)
76 |
77 | val vertexBuffers = arrayOf(
78 | VertexBuffer(render, 2, NDC_QUAD_COORDS_BUFFER),
79 | VertexBuffer(render, 2, SQUARE_TEX_COORDS_BUFFER),
80 | )
81 | mesh = Mesh(render, Mesh.PrimitiveMode.TRIANGLE_STRIP, null, vertexBuffers)
82 | }
83 |
84 | val labelOrigin = FloatArray(3)
85 |
86 | /**
87 | * Draws a label quad with text [label] at [pose]. The label will rotate to face [cameraPose] around the Y-axis.
88 | */
89 | fun draw(
90 | render: SampleRender,
91 | viewProjectionMatrix: FloatArray,
92 | pose: Pose,
93 | cameraPose: Pose,
94 | label: String
95 | ) {
96 | labelOrigin[0] = pose.tx()
97 | labelOrigin[1] = pose.ty()
98 | labelOrigin[2] = pose.tz()
99 | shader
100 | .setMat4("u_ViewProjection", viewProjectionMatrix)
101 | .setVec3("u_LabelOrigin", labelOrigin)
102 | .setVec3("u_CameraPos", cameraPose.translation)
103 | .setTexture("uTexture", cache.get(render, label))
104 | render.draw(mesh, shader)
105 | }
106 | }
--------------------------------------------------------------------------------
/app/src/main/java/io/intelligible/arcoremlkit/ml/render/PointCloudRender.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.render
3 |
4 | import com.google.ar.core.PointCloud
5 | import io.intelligible.arcoremlkit.common.samplerender.Mesh
6 | import io.intelligible.arcoremlkit.common.samplerender.SampleRender
7 | import io.intelligible.arcoremlkit.common.samplerender.Shader
8 | import io.intelligible.arcoremlkit.common.samplerender.VertexBuffer
9 |
10 | class PointCloudRender {
11 | lateinit var pointCloudVertexBuffer: VertexBuffer
12 | lateinit var pointCloudMesh: Mesh
13 | lateinit var pointCloudShader: Shader
14 |
15 | // Keep track of the last point cloud rendered to avoid updating the VBO if point cloud
16 | // was not changed. Do this using the timestamp since we can't compare PointCloud objects.
17 | var lastPointCloudTimestamp: Long = 0
18 |
19 | fun onSurfaceCreated(render: SampleRender) {
20 | // Point cloud
21 | pointCloudShader = Shader.createFromAssets(
22 | render, "shaders/point_cloud.vert", "shaders/point_cloud.frag", /*defines=*/null
23 | )
24 | .setVec4(
25 | "u_Color", floatArrayOf(31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f)
26 | )
27 | .setFloat("u_PointSize", 5.0f)
28 |
29 | // four entries per vertex: X, Y, Z, confidence
30 | pointCloudVertexBuffer = VertexBuffer(render, 4, null)
31 | val pointCloudVertexBuffers = arrayOf(pointCloudVertexBuffer)
32 | pointCloudMesh = Mesh(
33 | render, Mesh.PrimitiveMode.POINTS, null, pointCloudVertexBuffers
34 | )
35 | }
36 |
37 | fun drawPointCloud(
38 | render: SampleRender,
39 | pointCloud: PointCloud,
40 | modelViewProjectionMatrix: FloatArray
41 | ) {
42 | if (pointCloud.timestamp > lastPointCloudTimestamp) {
43 | pointCloudVertexBuffer.set(pointCloud.points)
44 | lastPointCloudTimestamp = pointCloud.timestamp
45 | }
46 | pointCloudShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix)
47 | render.draw(pointCloudMesh, pointCloudShader)
48 | }
49 | }
--------------------------------------------------------------------------------
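For reference, drawPointCloud expects a live PointCloud. ARCore's Frame.acquirePointCloud() returns a Closeable, so a call site typically looks like the sketch below; the actual acquisition happens in AppRenderer.onDrawFrame, outside this excerpt, and the function name here is illustrative.

    import com.google.ar.core.Frame
    import io.intelligible.arcoremlkit.common.samplerender.SampleRender
    import io.intelligible.arcoremlkit.ml.render.PointCloudRender

    // Hypothetical call site; render and viewProjectionMatrix come from the caller.
    fun drawFramePoints(
        frame: Frame,
        pointCloudRender: PointCloudRender,
        render: SampleRender,
        viewProjectionMatrix: FloatArray
    ) {
        frame.acquirePointCloud().use { pointCloud ->
            pointCloudRender.drawPointCloud(render, pointCloud, viewProjectionMatrix)
        }
    }
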
/app/src/main/java/io/intelligible/arcoremlkit/ml/render/TextTextureCache.kt:
--------------------------------------------------------------------------------
1 |
2 | package io.intelligible.arcoremlkit.ml.render
3 |
4 | import android.graphics.Bitmap
5 | import android.graphics.Canvas
6 | import android.graphics.Paint
7 | import android.graphics.Typeface
8 | import android.opengl.GLES30
9 | import io.intelligible.arcoremlkit.common.samplerender.GLError
10 | import io.intelligible.arcoremlkit.common.samplerender.SampleRender
11 | import io.intelligible.arcoremlkit.common.samplerender.Texture
12 | import java.nio.ByteBuffer
13 |
14 | /**
15 | * Generates and caches GL textures for label names.
16 | */
17 | class TextTextureCache {
18 | companion object {
19 | private const val TAG = "TextTextureCache"
20 | }
21 |
22 | private val cacheMap = mutableMapOf<String, Texture>()
23 |
24 | /**
25 | * Get a texture for a given string. If that string hasn't been used yet, create a texture for it
26 | * and cache the result.
27 | */
28 | fun get(render: SampleRender, string: String): Texture {
29 | return cacheMap.computeIfAbsent(string) {
30 | generateTexture(render, string)
31 | }
32 | }
33 |
34 | private fun generateTexture(render: SampleRender, string: String): Texture {
35 | val texture = Texture(render, Texture.Target.TEXTURE_2D, Texture.WrapMode.CLAMP_TO_EDGE)
36 |
37 | val bitmap = generateBitmapFromString(string)
38 | val buffer = ByteBuffer.allocateDirect(bitmap.byteCount)
39 | bitmap.copyPixelsToBuffer(buffer)
40 | buffer.rewind()
41 |
42 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture.textureId[0])
43 | GLError.maybeThrowGLException("Failed to bind texture", "glBindTexture")
44 | GLES30.glTexImage2D(
45 | GLES30.GL_TEXTURE_2D,
46 | 0,
47 | GLES30.GL_RGBA8,
48 | bitmap.width,
49 | bitmap.height,
50 | 0,
51 | GLES30.GL_RGBA,
52 | GLES30.GL_UNSIGNED_BYTE,
53 | buffer
54 | )
55 | GLError.maybeThrowGLException("Failed to populate texture data", "glTexImage2D")
56 | GLES30.glGenerateMipmap(GLES30.GL_TEXTURE_2D)
57 | GLError.maybeThrowGLException("Failed to generate mipmaps", "glGenerateMipmap")
58 |
59 | return texture
60 | }
61 |
62 | val textPaint = Paint().apply {
63 | textSize = 26f
64 | setARGB(0xff, 0xea, 0x43, 0x35)
65 | style = Paint.Style.FILL
66 | isAntiAlias = true
67 | textAlign = Paint.Align.CENTER
68 | typeface = Typeface.DEFAULT_BOLD
69 | strokeWidth = 2f
70 | }
71 |
72 | val strokePaint = Paint(textPaint).apply {
73 | setARGB(0xff, 0x00, 0x00, 0x00)
74 | style = Paint.Style.STROKE
75 | }
76 |
77 | private fun generateBitmapFromString(string: String): Bitmap {
78 | val w = 256
79 | val h = 256
80 | return Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888).apply {
81 | eraseColor(0)
82 |
83 | Canvas(this).apply {
84 | drawText(string, w / 2f, h / 2f, strokePaint)
85 |
86 | drawText(string, w / 2f, h / 2f, textPaint)
87 | }
88 | }
89 | }
90 | }
--------------------------------------------------------------------------------
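One caveat: Map.computeIfAbsent used in get above requires API level 24 unless core library desugaring is enabled. Kotlin's getOrPut is an equivalent that works on any API level, as in this hypothetical variant:

    import io.intelligible.arcoremlkit.common.samplerender.SampleRender
    import io.intelligible.arcoremlkit.common.samplerender.Texture

    // Hypothetical compat variant of TextTextureCache.get using Kotlin's getOrPut.
    class TextTextureCacheCompat(private val generate: (SampleRender, String) -> Texture) {
        private val cacheMap = mutableMapOf<String, Texture>()
        fun get(render: SampleRender, string: String): Texture =
            cacheMap.getOrPut(string) { generate(render, string) }
    }
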
/app/src/main/java/io/intelligible/arcoremlkit/utils/CameraUtils.kt:
--------------------------------------------------------------------------------
1 | package io.intelligible.arcoremlkit.utils
2 |
3 | import android.Manifest
4 | import android.app.Activity
5 | import android.content.Intent
6 | import android.content.pm.PackageManager
7 | import android.net.Uri
8 | import android.provider.Settings
9 | import androidx.core.app.ActivityCompat
10 | import androidx.core.content.ContextCompat
11 |
12 |
13 | /** Helper to ask camera permission. */
14 | object CameraPermissionHelper {
15 | private const val CAMERA_PERMISSION_CODE = 0
16 | private const val CAMERA_PERMISSION = Manifest.permission.CAMERA
17 |
18 | /** Check to see if we have the necessary permissions for this app. */
19 | fun hasCameraPermission(activity: Activity?): Boolean {
20 | return (ContextCompat.checkSelfPermission(activity!!, CAMERA_PERMISSION)
21 | == PackageManager.PERMISSION_GRANTED)
22 | }
23 |
24 | /** Check to see if we have the necessary permissions for this app, and ask for them if we don't. */
25 | fun requestCameraPermission(activity: Activity?) {
26 | ActivityCompat.requestPermissions(
27 | activity!!, arrayOf(CAMERA_PERMISSION), CAMERA_PERMISSION_CODE
28 | )
29 | }
30 |
31 | /** Check to see if we need to show the rationale for this permission. */
32 | fun shouldShowRequestPermissionRationale(activity: Activity?): Boolean {
33 | return ActivityCompat.shouldShowRequestPermissionRationale(activity!!, CAMERA_PERMISSION)
34 | }
35 |
36 | /** Launch the application settings screen to grant the permission. */
37 | fun launchPermissionSettings(activity: Activity) {
38 | val intent = Intent()
39 | intent.action = Settings.ACTION_APPLICATION_DETAILS_SETTINGS
40 | intent.data = Uri.fromParts("package", activity.packageName, null)
41 | activity.startActivity(intent)
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
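A sketch of how CameraPermissionHelper is typically consumed around session resume and the permission-result callback; the real call sites live in ARCoreSessionLifecycleHelper, outside this excerpt, so the function names below are illustrative.

    import android.app.Activity
    import android.widget.Toast
    import io.intelligible.arcoremlkit.utils.CameraPermissionHelper

    // Hypothetical call sites for the helper above.
    fun ensureCameraPermission(activity: Activity) {
        if (!CameraPermissionHelper.hasCameraPermission(activity)) {
            CameraPermissionHelper.requestCameraPermission(activity)
        }
    }

    fun onCameraPermissionDenied(activity: Activity) {
        Toast.makeText(activity, "Camera permission is needed to run this application", Toast.LENGTH_LONG).show()
        if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(activity)) {
            // "Don't ask again" was selected; send the user to the app settings screen instead.
            CameraPermissionHelper.launchPermissionSettings(activity)
        }
    }
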
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
15 |
18 |
21 |
22 |
23 |
24 |
30 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
12 |
13 |
19 |
20 |
29 |
36 |
37 |
38 |
45 |
52 |
53 |
54 |
55 |
56 |
65 |
66 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 | <background android:drawable="@drawable/ic_launcher_background"/>
4 | <foreground android:drawable="@drawable/ic_launcher_foreground"/>
5 | </adaptive-icon>
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 | <background android:drawable="@drawable/ic_launcher_background"/>
4 | <foreground android:drawable="@drawable/ic_launcher_foreground"/>
5 | </adaptive-icon>
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values-night/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #FFBB86FC
4 | #FF6200EE
5 | #FF3700B3
6 | #FF03DAC5
7 | #FF018786
8 | #FF000000
9 | #FFFFFFFF
10 | #0F9D58
11 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | <string name="app_name">ARCore MLkit</string>
3 | </resources>
--------------------------------------------------------------------------------
/app/src/main/res/values/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
--------------------------------------------------------------------------------
/app/src/test/java/io/intelligible/arcoremlkit/ExampleUnitTest.kt:
--------------------------------------------------------------------------------
1 | package io.intelligible.arcoremlkit
2 |
3 | import org.junit.Test
4 |
5 | import org.junit.Assert.*
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * See [testing documentation](http://d.android.com/tools/testing).
11 | */
12 | class ExampleUnitTest {
13 | @Test
14 | fun addition_isCorrect() {
15 | assertEquals(4, 2 + 2)
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | buildscript {
3 | ext.kotlin_version = "1.4.32"
4 | repositories {
5 | google()
6 | jcenter()
7 | }
8 | dependencies {
9 | classpath "com.android.tools.build:gradle:4.1.2"
10 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
11 |
12 | // NOTE: Do not place your application dependencies here; they belong
13 | // in the individual module build.gradle files
14 | }
15 | }
16 |
17 | allprojects {
18 | repositories {
19 | google()
20 | jcenter()
21 | }
22 | }
23 |
24 | task clean(type: Delete) {
25 | delete rootProject.buildDir
26 | }
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 | # Kotlin code style for this project: "official" or "obsolete":
21 | kotlin.code.style=official
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/ARcoreMLKit-object-detection/0b08afb79e3431c4c3a4644014ef0057b12f196b/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Jul 07 12:16:34 PKT 2021
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 | rootProject.name = "ARCore MLkit"
--------------------------------------------------------------------------------