├── app
├── .gitignore
├── src
│ └── main
│ │ ├── assets
│ │ ├── models
│ │ │ ├── andy.png
│ │ │ ├── anchor.png
│ │ │ ├── ear_fur.png
│ │ │ ├── trigrid.png
│ │ │ ├── andy_spec.png
│ │ │ ├── freckles.png
│ │ │ ├── nose_fur.png
│ │ │ ├── andy_shadow.png
│ │ │ ├── map_quality_bar.png
│ │ │ ├── andy_shadow.obj
│ │ │ ├── forehead_left.obj
│ │ │ └── forehead_right.obj
│ │ └── shaders
│ │ │ ├── point_cloud.frag
│ │ │ ├── screenquad.vert
│ │ │ ├── background_show_depth_color_visualization.vert
│ │ │ ├── screenquad.frag
│ │ │ ├── point_cloud.vert
│ │ │ ├── object.vert
│ │ │ ├── ar_object.vert
│ │ │ ├── plane.frag
│ │ │ ├── plane.vert
│ │ │ ├── background_show_depth_color_visualization.frag
│ │ │ ├── object.frag
│ │ │ └── ar_object.frag
│ │ ├── res
│ │ ├── drawable-hdpi
│ │ │ ├── ic_stat_close.png
│ │ │ ├── ic_stat_stop.png
│ │ │ └── ic_stat_playback.png
│ │ ├── drawable-mdpi
│ │ │ ├── ic_stat_close.png
│ │ │ ├── ic_stat_stop.png
│ │ │ └── ic_stat_playback.png
│ │ ├── drawable-xhdpi
│ │ │ ├── ic_stat_stop.png
│ │ │ ├── ic_stat_close.png
│ │ │ ├── ic_stat_record.png
│ │ │ └── ic_stat_playback.png
│ │ ├── drawable-xxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ ├── ic_stat_close.png
│ │ │ ├── ic_stat_stop.png
│ │ │ └── ic_stat_playback.png
│ │ ├── drawable-anydpi
│ │ │ ├── ic_stat_playback.xml
│ │ │ ├── ic_stat_stop.xml
│ │ │ ├── ic_stat_close.xml
│ │ │ └── ic_stat_record.xml
│ │ ├── values
│ │ │ ├── strings.xml
│ │ │ └── styles.xml
│ │ └── layout
│ │ │ └── activity_main.xml
│ │ ├── java
│ │ └── com
│ │ │ └── google
│ │ │ └── ar
│ │ │ └── core
│ │ │ └── examples
│ │ │ └── kotlin
│ │ │ ├── common
│ │ │ ├── helpers
│ │ │ │ ├── FullScreenHelper.kt
│ │ │ │ ├── InstantPlacementSettings.kt
│ │ │ │ ├── CameraPermissionHelper.kt
│ │ │ │ ├── TapHelper.java
│ │ │ │ ├── TrackingStateHelper.kt
│ │ │ │ ├── DepthSettings.kt
│ │ │ │ ├── SnackbarHelper.kt
│ │ │ │ └── DisplayRotationHelper.kt
│ │ │ └── rendering
│ │ │ │ ├── ShaderUtil.java
│ │ │ │ ├── PointCloudRenderer.kt
│ │ │ │ ├── BackgroundRenderer.kt
│ │ │ │ ├── PlaneRenderer.java
│ │ │ │ └── ObjectRenderer.kt
│ │ │ └── hellorecordingplayback
│ │ │ └── HelloRecordingPlaybackActivity.kt
│ │ └── AndroidManifest.xml
├── proguard-rules.pro
└── build.gradle
├── settings.gradle
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── .gitignore
├── README.md
├── gradle.properties
├── gradlew.bat
└── gradlew
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | google-services.json
3 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = "recording_playback_kotlin"
2 | include ':app'
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/app/src/main/assets/models/andy.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/andy.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/anchor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/anchor.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/ear_fur.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/ear_fur.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/trigrid.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/trigrid.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/andy_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/andy_spec.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/freckles.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/freckles.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/nose_fur.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/nose_fur.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/andy_shadow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/andy_shadow.png
--------------------------------------------------------------------------------
/app/src/main/assets/models/map_quality_bar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/assets/models/map_quality_bar.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_stat_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-hdpi/ic_stat_close.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_stat_stop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-hdpi/ic_stat_stop.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_stat_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-mdpi/ic_stat_close.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_stat_stop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-mdpi/ic_stat_stop.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_stat_stop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xhdpi/ic_stat_stop.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_stat_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xhdpi/ic_stat_close.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_stat_record.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xhdpi/ic_stat_record.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_stat_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xxhdpi/ic_stat_close.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_stat_stop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xxhdpi/ic_stat_stop.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_stat_playback.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-hdpi/ic_stat_playback.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_stat_playback.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-mdpi/ic_stat_playback.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_stat_playback.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xhdpi/ic_stat_playback.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_stat_playback.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kashif-E/recording_playback_arcore_kotlin/HEAD/app/src/main/res/drawable-xxhdpi/ic_stat_playback.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Android Studio configuration.
2 | *.iml
3 | .idea/
4 |
5 | # Gradle configuration.
6 | .gradle/
7 | build/
8 |
9 | # User configuration.
10 | local.properties
11 |
12 | # OS configurations.
13 | .DS_Store
14 |
15 | # Android NDK cmake output.
16 | .cxx/
17 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Nov 20 10:27:45 PST 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
7 |
--------------------------------------------------------------------------------
/app/src/main/assets/models/andy_shadow.obj:
--------------------------------------------------------------------------------
1 | # This file uses centimeters as units for non-parametric coordinates.
2 |
3 | g default
4 | v -0.100000 -0.000000 0.100000
5 | v 0.100000 -0.000000 0.100000
6 | v -0.100000 0.000000 -0.100000
7 | v 0.100000 0.000000 -0.100000
8 | vt 0.000000 0.000000
9 | vt 1.000000 0.000000
10 | vt 0.000000 1.000000
11 | vt 1.000000 1.000000
12 | vn 0.000000 1.000000 0.000000
13 | vn 0.000000 1.000000 0.000000
14 | vn 0.000000 1.000000 0.000000
15 | vn 0.000000 1.000000 0.000000
16 | s off
17 | g AndyBlobShadow_GEO
18 | f 4/4/1 3/3/2 1/1/3 2/2/4
19 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-anydpi/ic_stat_playback.xml:
--------------------------------------------------------------------------------
1 |
7 |
11 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-anydpi/ic_stat_stop.xml:
--------------------------------------------------------------------------------
1 |
7 |
11 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/app/src/main/assets/models/forehead_left.obj:
--------------------------------------------------------------------------------
1 | # This file uses centimeters as units for non-parametric coordinates.
2 |
3 | mtllib forehead_left.mtl
4 | g skinCluster1Set tweakSet1
5 | v 0.088114 0.052146 0.025014
6 | v 0.034652 0.109164 0.007964
7 | v 0.046345 -0.000129 -0.018835
8 | v -0.007117 0.056890 -0.035885
9 | vt 0.005966 0.994333
10 | vt 0.993035 0.994333
11 | vt 0.005966 0.006373
12 | vt 0.993035 0.006373
13 | vn -0.529919 -0.255013 0.808798
14 | vn -0.529919 -0.255013 0.808798
15 | vn -0.529919 -0.255013 0.808798
16 | vn -0.529919 -0.255013 0.808798
17 | s 1
18 | g earLeft_mesh asset
19 | usemtl earLeft_meshSG
20 | f 4/4/1 1/1/2 2/2/3
21 | f 1/1/2 4/4/1 3/3/4
22 |
--------------------------------------------------------------------------------
/app/src/main/assets/models/forehead_right.obj:
--------------------------------------------------------------------------------
1 | # This file uses centimeters as units for non-parametric coordinates.
2 |
3 | mtllib forehead_right.mtl
4 | g skinCluster2Set tweakSet2
5 | v -0.046329 -0.000137 -0.018811
6 | v 0.007133 0.056882 -0.035861
7 | v -0.088098 0.052138 0.025037
8 | v -0.034636 0.109157 0.007988
9 | vt 0.002194 0.001364
10 | vt 0.995606 0.001364
11 | vt 0.002194 0.996801
12 | vt 0.995606 0.996801
13 | vn 0.529919 -0.255013 0.808798
14 | vn 0.529919 -0.255013 0.808798
15 | vn 0.529919 -0.255013 0.808798
16 | vn 0.529919 -0.255013 0.808798
17 | s 1
18 | g earRight_mesh asset
19 | usemtl earLeft_meshSG
20 | f 3/3/1 2/2/2 4/4/3
21 | f 2/2/2 3/3/1 1/1/4
22 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /opt/android-sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/point_cloud.frag:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | precision mediump float;
18 | varying vec4 v_Color;
19 |
20 | void main() {
21 | gl_FragColor = v_Color;
22 | }
23 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-anydpi/ic_stat_close.xml:
--------------------------------------------------------------------------------
1 |
7 |
11 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/screenquad.vert:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | attribute vec4 a_Position;
18 | attribute vec2 a_TexCoord;
19 |
20 | varying vec2 v_TexCoord;
21 |
22 | void main() {
23 | gl_Position = a_Position;
24 | v_TexCoord = a_TexCoord;
25 | }
26 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ARCore Recording and Playback API — Kotlin implementation
2 | Place 3D objects on a 2D video using the Google ARCore Recording and Playback API. Google announced this API at Google I/O this year. It has many use cases and can make using AR much easier, for both Android developers and augmented-reality users. Imagine you are sitting in your office and want to check whether an IKEA desk, a Home Depot bathroom decorating idea, an appliance from Amazon, or the new colorful iMac 2021 from Apple will look good in your space — you don't have to go back home; just open a video you previously recorded and add a 3D model to it. All the examples in the ARCore SDK are in Java, so I made this Kotlin version for those who do not have experience with Java.
3 |
4 | Working of the API can be checked in the video below
5 |
6 | https://user-images.githubusercontent.com/61690178/121672641-77ad1080-cac9-11eb-8469-56afb61465b5.mp4
7 |
8 |
9 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/background_show_depth_color_visualization.vert:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | attribute vec4 a_Position;
18 | attribute vec2 a_TexCoord;
19 |
20 | varying vec2 v_TexCoord;
21 |
22 | void main() {
23 | v_TexCoord = a_TexCoord;
24 | gl_Position = a_Position;
25 | }
26 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/screenquad.frag:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | #extension GL_OES_EGL_image_external : require
17 |
18 | precision mediump float;
19 | varying vec2 v_TexCoord;
20 | uniform samplerExternalOES sTexture;
21 |
22 |
23 | void main() {
24 | gl_FragColor = texture2D(sTexture, v_TexCoord);
25 | }
26 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-anydpi/ic_stat_record.xml:
--------------------------------------------------------------------------------
1 |
7 |
11 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx1536m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | # org.gradle.parallel=true
18 |
19 | # Migrating to AndroidX, for targetSdkVersion 29.
20 | # For details, see https://developer.android.com/jetpack/androidx/migrate
21 | android.useAndroidX=true
22 | android.enableJetifier=true
23 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/point_cloud.vert:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | uniform mat4 u_ModelViewProjection;
18 | uniform vec4 u_Color;
19 | uniform float u_PointSize;
20 |
21 | attribute vec4 a_Position;
22 |
23 | varying vec4 v_Color;
24 |
25 | void main() {
26 | v_Color = u_Color;
27 | gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);
28 | gl_PointSize = u_PointSize;
29 | }
30 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/object.vert:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | uniform mat4 u_ModelView;
18 | uniform mat4 u_ModelViewProjection;
19 |
20 | attribute vec4 a_Position;
21 | attribute vec3 a_Normal;
22 | attribute vec2 a_TexCoord;
23 |
24 | varying vec3 v_ViewPosition;
25 | varying vec3 v_ViewNormal;
26 | varying vec2 v_TexCoord;
27 |
28 | // Transforms each vertex into clip space and passes the fragment shader
29 | // the view-space position and normal (for lighting) plus the texture UVs.
30 | void main() {
31 | v_ViewPosition = (u_ModelView * a_Position).xyz;
32 | // w = 0.0 so the normal is rotated but not translated; normalize to
33 | // strip any scale present in u_ModelView.
34 | v_ViewNormal = normalize((u_ModelView * vec4(a_Normal, 0.0)).xyz);
35 | v_TexCoord = a_TexCoord;
36 | gl_Position = u_ModelViewProjection * a_Position;
37 | }
38 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 | HelloAR Recording and Playback
19 | START RECORDING
20 | STOP RECORDING
21 | PLAYBACK
22 | Playback: %1$s
23 | Recording: %1$s
24 |
25 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/ar_object.vert:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | uniform mat4 u_ModelView;
18 | uniform mat4 u_ModelViewProjection;
19 |
20 | attribute vec4 a_Position;
21 | attribute vec3 a_Normal;
22 | attribute vec2 a_TexCoord;
23 |
24 | varying vec3 v_ViewPosition;
25 | varying vec3 v_ViewNormal;
26 | varying vec2 v_TexCoord;
27 | varying vec3 v_ScreenSpacePosition;
28 |
29 | void main() {
30 | v_ViewPosition = (u_ModelView * a_Position).xyz;
31 | v_ViewNormal = normalize((u_ModelView * vec4(a_Normal, 0.0)).xyz);
32 | v_TexCoord = a_TexCoord;
33 | gl_Position = u_ModelViewProjection * a_Position;
34 | v_ScreenSpacePosition = gl_Position.xyz / gl_Position.w;
35 | }
36 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/plane.frag:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | precision highp float;
18 | uniform sampler2D u_Texture;
19 | uniform vec4 u_gridControl; // dotThreshold, lineThreshold, lineFadeShrink, occlusionShrink
20 | varying vec3 v_TexCoordAlpha;
21 |
22 | // Renders the detected-plane grid: fully opaque where the control
23 | // texture's red channel (scaled by the per-vertex alpha) exceeds the
24 | // dot threshold, line-faded where the green channel exceeds the line
25 | // threshold, and a faint (25%) fill elsewhere. The final value is
26 | // additionally modulated by v_TexCoordAlpha.z so planes fade toward
27 | // their edges.
28 | void main() {
29 | vec4 control = texture2D(u_Texture, v_TexCoordAlpha.xy);
30 | float dotScale = v_TexCoordAlpha.z;
31 | // Maps vertex alpha from [1 - 1/lineFadeShrink, 1] onto [0, 1],
32 | // clamped at 0 — lines disappear faster than the plane itself fades.
33 | float lineFade = max(0.0, u_gridControl.z * v_TexCoordAlpha.z - (u_gridControl.z - 1.0));
34 | float alpha = (control.r * dotScale > u_gridControl.x) ? 1.0 // dot
35 | : (control.g > u_gridControl.y) ? lineFade // grid line
36 | : (0.25 * lineFade); // interior fill
37 | gl_FragColor = vec4(alpha * v_TexCoordAlpha.z);
38 | }
39 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
22 |
29 |
30 |
31 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'kotlin-android'
3 |
4 | android {
5 |     compileSdkVersion 29
6 |     defaultConfig {
7 |         applicationId "com.google.ar.core.examples.kotlin.hellorecordingplayback"
8 |
9 |         // AR Optional apps must declare minSdkVersion >= 14.
10 |         // AR Required apps must declare minSdkVersion >= 24.
11 |         minSdkVersion 24
12 |         targetSdkVersion 29
13 |         versionCode 1
14 |         versionName '1.0'
15 |     }
16 |     compileOptions {
17 |         sourceCompatibility JavaVersion.VERSION_1_8
18 |         targetCompatibility JavaVersion.VERSION_1_8
19 |     }
20 |     buildTypes {
21 |         release {
22 |             minifyEnabled false
23 |             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
24 |         }
25 |     }
26 |
27 |     dataBinding {
28 |         enabled true
29 |     }
30 | }
31 |
32 | dependencies {
33 |     // ARCore (Google Play Services for AR) library.
34 |     implementation 'com.google.ar:core:1.24.0'
35 |
36 |     // Obj - a simple Wavefront OBJ file loader
37 |     // https://github.com/javagl/Obj
38 |     implementation 'de.javagl:obj:0.2.1'
39 |
40 |     implementation 'androidx.appcompat:appcompat:1.1.0'
41 |     implementation 'com.google.android.material:material:1.1.0'
42 |     implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
43 |
44 |     // 'implementation' replaces the deprecated 'compile' configuration
45 |     // (removed in Gradle 7), matching the rest of this block.
46 |     implementation 'joda-time:joda-time:2.10.5'
47 |     // Pinned instead of the dynamic "+" version: dynamic versions make
48 |     // builds non-reproducible and can break without any local change.
49 |     implementation "androidx.core:core-ktx:1.3.2"
50 |     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
51 | }
52 | repositories {
53 |     mavenCentral()
54 | }
55 |
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/plane.vert:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | uniform mat4 u_Model;
18 | uniform mat4 u_ModelViewProjection;
19 | uniform mat2 u_PlaneUvMatrix;
20 | uniform vec3 u_Normal;
21 |
22 | attribute vec3 a_XZPositionAlpha; // (x, z, alpha)
23 |
24 | varying vec3 v_TexCoordAlpha;
25 |
26 | void main() {
27 | vec4 local_pos = vec4(a_XZPositionAlpha.x, 0.0, a_XZPositionAlpha.y, 1.0);
28 | vec4 world_pos = u_Model * local_pos;
29 |
30 | // Construct two vectors that are orthogonal to the normal.
31 | // This arbitrary choice is not co-linear with either horizontal
32 | // or vertical plane normals.
33 | const vec3 arbitrary = vec3(1.0, 1.0, 0.0);
34 | vec3 vec_u = normalize(cross(u_Normal, arbitrary));
35 | vec3 vec_v = normalize(cross(u_Normal, vec_u));
36 |
37 | // Project vertices in world frame onto vec_u and vec_v.
38 | vec2 uv = vec2(dot(world_pos.xyz, vec_u), dot(world_pos.xyz, vec_v));
39 | v_TexCoordAlpha = vec3(u_PlaneUvMatrix * uv, a_XZPositionAlpha.z);
40 | gl_Position = u_ModelViewProjection * local_pos;
41 | }
42 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/FullScreenHelper.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.app.Activity
19 | import android.view.View
20 |
21 | /** Helper to set up the Android full screen mode. */
22 | object FullScreenHelper {
23 | /**
24 |    * Sets the Android fullscreen flags. Expected to be called from [Activity.onWindowFocusChanged].
25 | *
26 | * @param activity the Activity on which the full screen mode will be set.
27 | * @param hasFocus the hasFocus flag passed from the [Activity.onWindowFocusChanged] callback.
28 | */
29 | fun setFullScreenOnWindowFocusChanged(activity: Activity, hasFocus: Boolean) {
30 | if (hasFocus) {
31 | // https://developer.android.com/training/system-ui/immersive.html#sticky
32 | activity
33 | .window
34 | .decorView.systemUiVisibility = (View.SYSTEM_UI_FLAG_LAYOUT_STABLE
35 | or View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
36 | or View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
37 | or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
38 | or View.SYSTEM_UI_FLAG_FULLSCREEN
39 | or View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY)
40 | }
41 | }
42 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/InstantPlacementSettings.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.content.Context
19 | import android.content.SharedPreferences
20 |
21 | /** Manages the Instant Placement option setting and shared preferences. */
22 | class InstantPlacementSettings {
23 | private var instantPlacementEnabled = true
24 | private var sharedPreferences: SharedPreferences? = null
25 |
26 | /** Initializes the current settings based on the saved value. */
27 | fun onCreate(context: Context) {
28 | sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_ID, Context.MODE_PRIVATE)
29 | instantPlacementEnabled = sharedPreferences!!.getBoolean(SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED, false)
30 | }
31 |
32 |   /** Retrieves whether Instant Placement is enabled. */
33 | fun isInstantPlacementEnabled(): Boolean {
34 | return instantPlacementEnabled
35 | }
36 |
37 | fun setInstantPlacementEnabled(enable: Boolean) {
38 | if (enable == instantPlacementEnabled) {
39 | return // No change.
40 | }
41 |
42 | // Updates the stored default settings.
43 | instantPlacementEnabled = enable
44 | val editor = sharedPreferences!!.edit()
45 | editor.putBoolean(SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED, instantPlacementEnabled)
46 | editor.apply()
47 | }
48 |
49 | companion object {
50 | const val SHARED_PREFERENCES_ID = "SHARED_PREFERENCES_INSTANT_PLACEMENT_OPTIONS"
51 | const val SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED = "instant_placement_enabled"
52 | }
53 | }
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
20 |
21 |
22 |
23 |
24 |
26 |
27 |
28 |
29 |
36 |
37 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/CameraPermissionHelper.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.Manifest
19 | import android.app.Activity
20 | import android.content.Intent
21 | import android.content.pm.PackageManager
22 | import android.net.Uri
23 | import android.provider.Settings
24 | import androidx.core.app.ActivityCompat
25 | import androidx.core.content.ContextCompat
26 |
27 | /** Helper to ask camera permission. */
28 | object CameraPermissionHelper {
29 | private const val CAMERA_PERMISSION_CODE = 0
30 | private const val CAMERA_PERMISSION = Manifest.permission.CAMERA
31 |
32 | /** Check to see we have the necessary permissions for this app. */
33 | fun hasCameraPermission(activity: Activity?): Boolean {
34 | return (ContextCompat.checkSelfPermission(activity!!, CAMERA_PERMISSION)
35 | == PackageManager.PERMISSION_GRANTED)
36 | }
37 |
38 | /** Check to see we have the necessary permissions for this app, and ask for them if we don't. */
39 | fun requestCameraPermission(activity: Activity?) {
40 | ActivityCompat.requestPermissions(
41 | activity!!, arrayOf(CAMERA_PERMISSION), CAMERA_PERMISSION_CODE)
42 | }
43 |
44 | /** Check to see if we need to show the rationale for this permission. */
45 | fun shouldShowRequestPermissionRationale(activity: Activity?): Boolean {
46 | return ActivityCompat.shouldShowRequestPermissionRationale(activity!!, CAMERA_PERMISSION)
47 | }
48 |
49 | /** Launch Application Setting to grant permission. */
50 | fun launchPermissionSettings(activity: Activity) {
51 | val intent = Intent()
52 | intent.action = Settings.ACTION_APPLICATION_DETAILS_SETTINGS
53 | intent.data = Uri.fromParts("package", activity.packageName, null)
54 | activity.startActivity(intent)
55 | }
56 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/TapHelper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers;
17 |
18 | import android.content.Context;
19 | import android.view.GestureDetector;
20 | import android.view.MotionEvent;
21 | import android.view.View;
22 | import android.view.View.OnTouchListener;
23 | import java.util.concurrent.ArrayBlockingQueue;
24 | import java.util.concurrent.BlockingQueue;
25 |
26 | /**
27 | * Helper to detect taps using Android GestureDetector, and pass the taps between UI thread and
28 | * render thread.
29 | */
30 | public final class TapHelper implements OnTouchListener {
31 | private final GestureDetector gestureDetector;
32 |   // Typed queue: the raw BlockingQueue erased the element type to Object, which
33 |   // breaks poll()'s declared MotionEvent return type. 16 pending taps max; extras are dropped.
34 |   private final BlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
33 |
34 | /**
35 | * Creates the tap helper.
36 | *
37 | * @param context the application's context.
38 | */
39 | public TapHelper(Context context) {
40 | gestureDetector =
41 | new GestureDetector(
42 | context,
43 | new GestureDetector.SimpleOnGestureListener() {
44 | @Override
45 | public boolean onSingleTapUp(MotionEvent e) {
46 | // Queue tap if there is space. Tap is lost if queue is full.
47 | queuedSingleTaps.offer(e);
48 | return true;
49 | }
50 |
51 | @Override
52 | public boolean onDown(MotionEvent e) {
53 | return true;
54 | }
55 | });
56 | }
57 |
58 | /**
59 | * Polls for a tap.
60 | *
61 | * @return if a tap was queued, a MotionEvent for the tap. Otherwise null if no taps are queued.
62 | */
63 | public MotionEvent poll() {
64 | return queuedSingleTaps.poll();
65 | }
66 |
67 | @Override
68 | public boolean onTouch(View view, MotionEvent motionEvent) {
69 | return gestureDetector.onTouchEvent(motionEvent);
70 | }
71 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/background_show_depth_color_visualization.frag:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | precision mediump float;
18 |
19 | uniform sampler2D u_DepthTexture;
20 |
21 | varying vec2 v_TexCoord;
22 |
23 | const highp float kMaxDepth = 8000.0; // In millimeters.
24 |
25 | float DepthGetMillimeters(in sampler2D depth_texture, in vec2 depth_uv) {
26 | // Depth is packed into the red and green components of its texture.
27 | // The texture is a normalized format, storing millimeters.
28 | vec3 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).xyz;
29 | return dot(packedDepthAndVisibility.xy, vec2(255.0, 256.0 * 255.0));
30 | }
31 |
32 | // Returns a color corresponding to the depth passed in. Colors range from red
33 | // to green to blue, where red is closest and blue is farthest.
34 | //
35 | // Uses Turbo color mapping:
36 | // https://ai.googleblog.com/2019/08/turbo-improved-rainbow-colormap-for.html
37 | vec3 DepthGetColorVisualization(in float x) {
38 | const vec4 kRedVec4 = vec4(0.55305649, 3.00913185, -5.46192616, -11.11819092);
39 | const vec4 kGreenVec4 = vec4(0.16207513, 0.17712472, 15.24091500, -36.50657960);
40 | const vec4 kBlueVec4 = vec4(-0.05195877, 5.18000081, -30.94853351, 81.96403246);
41 | const vec2 kRedVec2 = vec2(27.81927491, -14.87899417);
42 | const vec2 kGreenVec2 = vec2(25.95549545, -5.02738237);
43 | const vec2 kBlueVec2 = vec2(-86.53476570, 30.23299484);
44 | const float kInvalidDepthThreshold = 0.01;
45 |
46 | // Adjusts color space via 6 degree poly interpolation to avoid pure red.
47 | x = clamp(x * 0.9 + 0.03, 0.0, 1.0);
48 | vec4 v4 = vec4(1.0, x, x * x, x * x * x);
49 | vec2 v2 = v4.zw * v4.z;
50 | vec3 polynomial_color = vec3(
51 | dot(v4, kRedVec4) + dot(v2, kRedVec2),
52 | dot(v4, kGreenVec4) + dot(v2, kGreenVec2),
53 | dot(v4, kBlueVec4) + dot(v2, kBlueVec2)
54 | );
55 |
56 | return step(kInvalidDepthThreshold, x) * polynomial_color;
57 | }
58 |
59 | void main() {
60 | highp float normalized_depth =
61 | clamp(DepthGetMillimeters(u_DepthTexture, v_TexCoord.xy) / kMaxDepth,
62 | 0.0, 1.0);
63 | vec4 depth_color = vec4(DepthGetColorVisualization(normalized_depth), 1.0);
64 | gl_FragColor = depth_color;
65 | }
66 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/TrackingStateHelper.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.app.Activity
19 | import android.view.WindowManager
20 | import com.google.ar.core.Camera
21 | import com.google.ar.core.TrackingFailureReason
22 | import com.google.ar.core.TrackingState
23 |
24 | /** Gets human-readable tracking failure reasons and suggested actions. */
25 | class TrackingStateHelper(private val activity: Activity) {
26 | private var previousTrackingState: TrackingState? = null
27 |
28 | /** Keep the screen unlocked while tracking, but allow it to lock when tracking stops. */
29 | fun updateKeepScreenOnFlag(trackingState: TrackingState) {
30 | if (trackingState == previousTrackingState) {
31 | return
32 | }
33 | previousTrackingState = trackingState
34 | when (trackingState) {
35 | TrackingState.PAUSED, TrackingState.STOPPED -> activity.runOnUiThread { activity.window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON) }
36 | TrackingState.TRACKING -> activity.runOnUiThread { activity.window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON) }
37 | }
38 | }
39 |
40 | companion object {
41 | private const val INSUFFICIENT_FEATURES_MESSAGE = "Can't find anything. Aim device at a surface with more texture or color."
42 | private const val EXCESSIVE_MOTION_MESSAGE = "Moving too fast. Slow down."
43 | private const val INSUFFICIENT_LIGHT_MESSAGE = "Too dark. Try moving to a well-lit area."
44 | private const val BAD_STATE_MESSAGE = "Tracking lost due to bad internal state. Please try restarting the AR experience."
45 | private const val CAMERA_UNAVAILABLE_MESSAGE = "Another app is using the camera. Tap on this app or try closing the other one."
46 | fun getTrackingFailureReasonString(camera: Camera): String {
47 | val reason = camera.trackingFailureReason
48 |       return when (reason) {
49 |         TrackingFailureReason.NONE -> ""
50 |         TrackingFailureReason.BAD_STATE -> BAD_STATE_MESSAGE
51 |         TrackingFailureReason.INSUFFICIENT_LIGHT -> INSUFFICIENT_LIGHT_MESSAGE
52 |         TrackingFailureReason.EXCESSIVE_MOTION -> EXCESSIVE_MOTION_MESSAGE
53 |         TrackingFailureReason.INSUFFICIENT_FEATURES -> INSUFFICIENT_FEATURES_MESSAGE
54 |         TrackingFailureReason.CAMERA_UNAVAILABLE -> CAMERA_UNAVAILABLE_MESSAGE
55 |         // Fallback for failure reasons added in newer ARCore versions; the original
56 |         // trailing 'return' after the exhaustive when was unreachable dead code.
57 |         else -> "Unknown tracking failure reason: $reason"
58 |       }
59 |     }
58 | }
59 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/object.frag:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | precision mediump float;
18 |
19 | uniform sampler2D u_Texture;
20 |
21 | uniform vec4 u_LightingParameters;
22 | uniform vec4 u_MaterialParameters;
23 | uniform vec4 u_ColorCorrectionParameters;
24 |
25 | varying vec3 v_ViewPosition;
26 | varying vec3 v_ViewNormal;
27 | varying vec2 v_TexCoord;
28 | uniform vec4 u_ObjColor;
29 |
30 | void main() {
31 | // We support approximate sRGB gamma.
32 | const float kGamma = 0.4545454;
33 | const float kInverseGamma = 2.2;
34 | const float kMiddleGrayGamma = 0.466;
35 |
36 | // Unpack lighting and material parameters for better naming.
37 | vec3 viewLightDirection = u_LightingParameters.xyz;
38 | vec3 colorShift = u_ColorCorrectionParameters.rgb;
39 | float averagePixelIntensity = u_ColorCorrectionParameters.a;
40 |
41 | float materialAmbient = u_MaterialParameters.x;
42 | float materialDiffuse = u_MaterialParameters.y;
43 | float materialSpecular = u_MaterialParameters.z;
44 | float materialSpecularPower = u_MaterialParameters.w;
45 |
46 | // Normalize varying parameters, because they are linearly interpolated in the vertex shader.
47 | vec3 viewFragmentDirection = normalize(v_ViewPosition);
48 | vec3 viewNormal = normalize(v_ViewNormal);
49 |
50 | // Flip the y-texture coordinate to address the texture from top-left.
51 | vec4 objectColor = texture2D(u_Texture, vec2(v_TexCoord.x, 1.0 - v_TexCoord.y));
52 |
53 | // Apply color to grayscale image only if the alpha of u_ObjColor is
54 | // greater and equal to 255.0.
55 | objectColor.rgb *= mix(vec3(1.0), u_ObjColor.rgb / 255.0,
56 | step(255.0, u_ObjColor.a));
57 |
58 | // Apply inverse SRGB gamma to the texture before making lighting calculations.
59 | objectColor.rgb = pow(objectColor.rgb, vec3(kInverseGamma));
60 |
61 | // Ambient light is unaffected by the light intensity.
62 | float ambient = materialAmbient;
63 |
64 | // Approximate a hemisphere light (not a harsh directional light).
65 | float diffuse = materialDiffuse *
66 | 0.5 * (dot(viewNormal, viewLightDirection) + 1.0);
67 |
68 | // Compute specular light.
69 | vec3 reflectedLightDirection = reflect(viewLightDirection, viewNormal);
70 | float specularStrength = max(0.0, dot(viewFragmentDirection, reflectedLightDirection));
71 | float specular = materialSpecular *
72 | pow(specularStrength, materialSpecularPower);
73 |
74 | vec3 color = objectColor.rgb * (ambient + diffuse) + specular;
75 | // Apply SRGB gamma before writing the fragment color.
76 | color.rgb = pow(color, vec3(kGamma));
77 | // Apply average pixel intensity and color shift
78 | color *= colorShift * (averagePixelIntensity / kMiddleGrayGamma);
79 | gl_FragColor.rgb = color;
80 | gl_FragColor.a = objectColor.a;
81 | }
82 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/DepthSettings.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.content.Context
19 | import android.content.SharedPreferences
20 |
21 | /** Manages the Occlusion option setting and shared preferences. */
22 | class DepthSettings {
23 | // Current depth-based settings used by the app.
24 | private var depthColorVisualizationEnabled = false
25 | private var useDepthForOcclusion = false
26 | private var sharedPreferences: SharedPreferences? = null
27 |
28 | /** Initializes the current settings based on when the app was last used. */
29 | fun onCreate(context: Context) {
30 | sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_ID, Context.MODE_PRIVATE)
31 | useDepthForOcclusion = sharedPreferences!!.getBoolean(SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION, false)
32 | }
33 |
34 | /** Retrieves whether depth-based occlusion is enabled. */
35 | fun useDepthForOcclusion(): Boolean {
36 | return useDepthForOcclusion
37 | }
38 |
39 | fun setUseDepthForOcclusion(enable: Boolean) {
40 | if (enable == useDepthForOcclusion) {
41 | return // No change.
42 | }
43 |
44 | // Updates the stored default settings.
45 | useDepthForOcclusion = enable
46 | val editor = sharedPreferences!!.edit()
47 | editor.putBoolean(SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION, useDepthForOcclusion)
48 | editor.apply()
49 | }
50 |
51 | /** Retrieves whether to render the depth map visualization instead of the camera feed. */
52 | fun depthColorVisualizationEnabled(): Boolean {
53 | return depthColorVisualizationEnabled
54 | }
55 |
56 | fun setDepthColorVisualizationEnabled(depthColorVisualizationEnabled: Boolean) {
57 | this.depthColorVisualizationEnabled = depthColorVisualizationEnabled
58 | }
59 |
60 | /** Determines if the initial prompt to use depth-based occlusion should be shown. */
61 | fun shouldShowDepthEnableDialog(): Boolean {
62 | // Checks if this dialog has been called before on this device.
63 | val showDialog = sharedPreferences!!.getBoolean(SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE, true)
64 | if (showDialog) {
65 | // Only ever shows the dialog on the first time. If the user wants to adjust these settings
66 | // again, they can use the gear icon to invoke the settings menu dialog.
67 | val editor = sharedPreferences!!.edit()
68 | editor.putBoolean(SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE, false)
69 | editor.apply()
70 | }
71 | return showDialog
72 | }
73 |
74 | companion object {
75 | const val SHARED_PREFERENCES_ID = "SHARED_PREFERENCES_OCCLUSION_OPTIONS"
76 | const val SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE = "show_depth_enable_dialog_oobe"
77 | const val SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION = "use_depth_for_occlusion"
78 | }
79 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/SnackbarHelper.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.R
19 | import android.app.Activity
20 | import android.view.View
21 | import android.widget.TextView
22 | import com.google.android.material.snackbar.BaseTransientBottomBar.BaseCallback
23 | import com.google.android.material.snackbar.Snackbar
24 |
25 | /**
26 | * Helper to manage the sample snackbar. Hides the Android boilerplate code, and exposes simpler
27 | * methods.
28 | */
29 | class SnackbarHelper {
30 | private var messageSnackbar: Snackbar? = null
31 |
32 | private enum class DismissBehavior {
33 | HIDE, SHOW, FINISH
34 | }
35 |
36 | private var maxLines = 2
37 | private var lastMessage = ""
38 | private var snackbarView: View? = null
39 | val isShowing: Boolean
40 | get() = messageSnackbar != null
41 |
42 | /** Shows a snackbar with a given message. */
43 | fun showMessage(activity: Activity, message: String) {
44 | if (!message.isEmpty() && (!isShowing || lastMessage != message)) {
45 | lastMessage = message
46 | show(activity, message, DismissBehavior.HIDE)
47 | }
48 | }
49 |
50 | /** Shows a snackbar with a given message, and a dismiss button. */
51 | fun showMessageWithDismiss(activity: Activity, message: String) {
52 | show(activity, message, DismissBehavior.SHOW)
53 | }
54 |
55 | /**
56 | * Shows a snackbar with a given error message. When dismissed, will finish the activity. Useful
57 | * for notifying errors, where no further interaction with the activity is possible.
58 | */
59 | fun showError(activity: Activity, errorMessage: String) {
60 | show(activity, errorMessage, DismissBehavior.FINISH)
61 | }
62 |
63 | /**
64 | * Hides the currently showing snackbar, if there is one. Safe to call from any thread. Safe to
65 | * call even if snackbar is not shown.
66 | */
67 | fun hide(activity: Activity) {
68 | if (!isShowing) {
69 | return
70 | }
71 | lastMessage = ""
72 | val messageSnackbarToHide = messageSnackbar
73 | messageSnackbar = null
74 | activity.runOnUiThread { messageSnackbarToHide!!.dismiss() }
75 | }
76 |
77 | fun setMaxLines(lines: Int) {
78 | maxLines = lines
79 | }
80 |
81 | /**
82 | * Sets the view that will be used to find a suitable parent view to hold the Snackbar view.
83 | *
84 | *
85 | * To use the root layout ([android.R.id.content]), pass in `null`.
86 | *
87 |    * @param snackbarView the view to pass to [com.google.android.material.snackbar.Snackbar.make] which will be used to find a
88 | * suitable parent, which is a [androidx.coordinatorlayout.widget.CoordinatorLayout], or
89 | * the window decor's content view, whichever comes first.
90 | */
91 | fun setParentView(snackbarView: View?) {
92 | this.snackbarView = snackbarView
93 | }
94 |
95 | private fun show(
96 | activity: Activity, message: String, dismissBehavior: DismissBehavior) {
97 | activity.runOnUiThread {
98 | val view = if (snackbarView == null) activity.findViewById(R.id.content) else snackbarView
99 | messageSnackbar = Snackbar.make(view!!
100 | ,
101 | message,
102 | Snackbar.LENGTH_INDEFINITE)
103 | messageSnackbar!!.view.setBackgroundColor(BACKGROUND_COLOR)
104 | if (dismissBehavior != DismissBehavior.HIDE) {
105 | messageSnackbar!!.setAction(
106 | "Dismiss"
107 | ) { messageSnackbar!!.dismiss() }
108 | if (dismissBehavior == DismissBehavior.FINISH) {
109 | messageSnackbar!!.addCallback(
110 | object : BaseCallback() {
111 | override fun onDismissed(transientBottomBar: Snackbar?, event: Int) {
112 | super.onDismissed(transientBottomBar, event)
113 | activity.finish()
114 | }
115 | })
116 | }
117 | }
118 | (messageSnackbar!!
119 | .view
120 | .findViewById(com.google.android.material.R.id.snackbar_text) as TextView).maxLines = maxLines
121 | messageSnackbar!!.show()
122 | }
123 | }
124 |
125 | companion object {
126 | private const val BACKGROUND_COLOR = -0x40cdcdce
127 | }
128 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/rendering/ShaderUtil.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.rendering;
17 |
18 | import android.content.Context;
19 | import android.opengl.GLES20;
20 | import android.util.Log;
21 | import java.io.BufferedReader;
22 | import java.io.IOException;
23 | import java.io.InputStream;
24 | import java.io.InputStreamReader;
25 | import java.util.Map;
26 | import java.util.TreeMap;
27 |
28 | /** Shader helper functions. */
29 | public class ShaderUtil {
30 | /**
31 | * Converts a raw text file, saved as a resource, into an OpenGL ES shader.
32 | *
33 | * @param type The type of shader we will be creating.
34 | * @param filename The filename of the asset file about to be turned into a shader.
35 | * @param defineValuesMap The #define values to add to the top of the shader source code.
36 | * @return The shader object handler.
37 | */
38 | public static int loadGLShader(
39 | String tag, Context context, int type, String filename, Map defineValuesMap)
40 | throws IOException {
41 | // Load shader source code.
42 | String code = readShaderFileFromAssets(context, filename);
43 |
44 | // Prepend any #define values specified during this run.
45 | String defines = "";
46 | for (Map.Entry entry : defineValuesMap.entrySet()) {
47 | defines += "#define " + entry.getKey() + " " + entry.getValue() + "\n";
48 | }
49 | code = defines + code;
50 |
51 | // Compiles shader code.
52 | int shader = GLES20.glCreateShader(type);
53 | GLES20.glShaderSource(shader, code);
54 | GLES20.glCompileShader(shader);
55 |
56 | // Get the compilation status.
57 | final int[] compileStatus = new int[1];
58 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
59 |
60 | // If the compilation failed, delete the shader.
61 | if (compileStatus[0] == 0) {
62 | Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
63 | GLES20.glDeleteShader(shader);
64 | shader = 0;
65 | }
66 |
67 | if (shader == 0) {
68 | throw new RuntimeException("Error creating shader.");
69 | }
70 |
71 | return shader;
72 | }
73 |
74 | /** Overload of loadGLShader that assumes no additional #define values to add. */
75 | public static int loadGLShader(String tag, Context context, int type, String filename)
76 | throws IOException {
77 | Map emptyDefineValuesMap = new TreeMap<>();
78 | return loadGLShader(tag, context, type, filename, emptyDefineValuesMap);
79 | }
80 |
81 | /**
82 | * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
83 | *
84 | * @param label Label to report in case of error.
85 | * @throws RuntimeException If an OpenGL error is detected.
86 | */
87 | public static void checkGLError(String tag, String label) {
88 | int lastError = GLES20.GL_NO_ERROR;
89 | // Drain the queue of all errors.
90 | int error;
91 | while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
92 | Log.e(tag, label + ": glError " + error);
93 | lastError = error;
94 | }
95 | if (lastError != GLES20.GL_NO_ERROR) {
96 | throw new RuntimeException(label + ": glError " + lastError);
97 | }
98 | }
99 |
100 | /**
101 | * Converts a raw shader file into a string.
102 | *
103 | * @param filename The filename of the shader file about to be turned into a shader.
104 | * @return The context of the text file, or null in case of error.
105 | */
106 | private static String readShaderFileFromAssets(Context context, String filename)
107 | throws IOException {
108 | try (InputStream inputStream = context.getAssets().open(filename);
109 | BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
110 | StringBuilder sb = new StringBuilder();
111 | String line;
112 | while ((line = reader.readLine()) != null) {
113 | String[] tokens = line.split(" ", -1);
114 | if (tokens[0].equals("#include")) {
115 | String includeFilename = tokens[1];
116 | includeFilename = includeFilename.replace("\"", "");
117 | if (includeFilename.equals(filename)) {
118 | throw new IOException("Do not include the calling file.");
119 | }
120 | sb.append(readShaderFileFromAssets(context, includeFilename));
121 | } else {
122 | sb.append(line).append("\n");
123 | }
124 | }
125 | return sb.toString();
126 | }
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
16 |
19 |
20 |
21 |
22 |
23 |
24 |
28 |
29 |
38 |
39 |
40 |
49 |
50 |
54 |
55 |
68 |
69 |
82 |
83 |
94 |
95 |
107 |
108 |
109 |
110 |
121 |
122 |
123 |
124 |
125 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/helpers/DisplayRotationHelper.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.helpers
17 |
18 | import android.content.Context
19 | import android.hardware.camera2.CameraAccessException
20 | import android.hardware.camera2.CameraCharacteristics
21 | import android.hardware.camera2.CameraManager
22 | import android.hardware.display.DisplayManager
23 | import android.hardware.display.DisplayManager.DisplayListener
24 | import android.view.Display
25 | import android.view.Surface
26 | import android.view.WindowManager
27 | import com.google.ar.core.Session
28 |
/**
 * Helper to track the display rotations. In particular, the 180 degree rotations are not notified
 * by the onSurfaceChanged() callback, and thus they require listening to the android display
 * events.
 *
 * Construction only resolves system services; the display listener is not registered until
 * [onResume] is called.
 *
 * @param context the Android [Context] used to look up system services.
 */
class DisplayRotationHelper(context: Context) : DisplayListener {
    private var viewportChanged = false
    private var viewportWidth = 0
    private var viewportHeight = 0

    // Idiomatic property initializers replace the former trailing init block;
    // initialization order and calls are unchanged.
    private val displayManager =
        context.getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
    private val cameraManager =
        context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    private val display: Display =
        (context.getSystemService(Context.WINDOW_SERVICE) as WindowManager).defaultDisplay

    /** Registers the display listener. Should be called from [Activity.onResume]. */
    fun onResume() {
        displayManager.registerDisplayListener(this, null)
    }

    /** Unregisters the display listener. Should be called from [Activity.onPause]. */
    fun onPause() {
        displayManager.unregisterDisplayListener(this)
    }

    /**
     * Records a change in surface dimensions. This will be later used by
     * [updateSessionIfNeeded]. Should be called from the surface-changed callback.
     *
     * @param width the updated width of the surface.
     * @param height the updated height of the surface.
     */
    fun onSurfaceChanged(width: Int, height: Int) {
        viewportWidth = width
        viewportHeight = height
        viewportChanged = true
    }

    /**
     * Updates the session display geometry if a change was posted either by an
     * [onSurfaceChanged] call or by an [onDisplayChanged] system callback. This function should be
     * called explicitly before each call to [Session.update]. This function will also clear the
     * 'pending update' (viewportChanged) flag.
     *
     * @param session the [Session] object to update if display geometry changed.
     */
    fun updateSessionIfNeeded(session: Session) {
        if (viewportChanged) {
            session.setDisplayGeometry(display.rotation, viewportWidth, viewportHeight)
            viewportChanged = false
        }
    }

    /**
     * Returns the aspect ratio of the GL surface viewport while accounting for the display rotation
     * relative to the device camera sensor orientation.
     */
    fun getCameraSensorRelativeViewportAspectRatio(cameraId: String?): Float {
        val rotation = getCameraSensorToDisplayRotation(cameraId)
        return when (rotation) {
            // Sensor rotated 90/270 degrees relative to display: swap the axes.
            90, 270 -> viewportHeight.toFloat() / viewportWidth.toFloat()
            0, 180 -> viewportWidth.toFloat() / viewportHeight.toFloat()
            else -> throw RuntimeException("Unhandled rotation: $rotation")
        }
    }

    /**
     * Returns the rotation of the back-facing camera with respect to the display. The value is one
     * of 0, 90, 180, 270.
     *
     * @throws RuntimeException if the camera characteristics cannot be queried.
     */
    fun getCameraSensorToDisplayRotation(cameraId: String?): Int {
        val characteristics =
            try {
                cameraManager.getCameraCharacteristics(cameraId!!)
            } catch (e: CameraAccessException) {
                throw RuntimeException("Unable to determine display orientation", e)
            }

        // Camera sensor orientation.
        val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!

        // Current display orientation.
        val displayOrientation = toDegrees(display.rotation)

        // Make sure we return 0, 90, 180, or 270 degrees.
        return (sensorOrientation - displayOrientation + 360) % 360
    }

    /** Maps a [Surface] rotation constant to its angle in degrees. */
    private fun toDegrees(rotation: Int): Int =
        when (rotation) {
            Surface.ROTATION_0 -> 0
            Surface.ROTATION_90 -> 90
            Surface.ROTATION_180 -> 180
            Surface.ROTATION_270 -> 270
            else -> throw RuntimeException("Unknown rotation $rotation")
        }

    override fun onDisplayAdded(displayId: Int) {}

    override fun onDisplayRemoved(displayId: Int) {}

    override fun onDisplayChanged(displayId: Int) {
        // Any display change (including 180 degree flips) invalidates the cached geometry.
        viewportChanged = true
    }
}
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a message to stdout without exiting.
warn () {
echo "$*"
}

# Print a message and abort the script with a failure exit code.
die () {
echo
echo "$*"
echo
exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar


# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

JAVACMD=`cygpath --unix "$JAVACMD"`

# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi

# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# Hand control to the JVM, replacing this shell process.
exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/rendering/PointCloudRenderer.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.rendering
17 |
18 | import android.content.Context
19 | import android.opengl.GLES20
20 | import android.opengl.Matrix
21 | import com.google.ar.core.PointCloud
22 | import java.io.IOException
23 |
/** Renders a point cloud. */
class PointCloudRenderer {
    private var vbo = 0
    private var vboSize = 0
    private var programName = 0
    private var positionAttribute = 0
    private var modelViewProjectionUniform = 0
    private var colorUniform = 0
    private var pointSizeUniform = 0
    private var numPoints = 0

    // Keep track of the last point cloud rendered to avoid updating the VBO if point cloud
    // was not changed. Do this using the timestamp since we can't compare PointCloud objects.
    private var lastTimestamp: Long = 0

    /**
     * Allocates and initializes OpenGL resources needed by the plane renderer. Must be called on the
     * OpenGL thread, typically in [GLSurfaceView.Renderer.onSurfaceCreated].
     *
     * @param context Needed to access shader source.
     * @throws IOException If the shader assets cannot be read.
     */
    @Throws(IOException::class)
    fun createOnGlThread(context: Context?) {
        ShaderUtil.checkGLError(TAG, "before create")
        val buffers = IntArray(1)
        GLES20.glGenBuffers(1, buffers, 0)
        vbo = buffers[0]
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo)
        vboSize = INITIAL_BUFFER_POINTS * BYTES_PER_POINT
        // Allocate an empty dynamic buffer; update() fills it with each new point cloud.
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW)
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0)
        ShaderUtil.checkGLError(TAG, "buffer alloc")
        val vertexShader = ShaderUtil.loadGLShader(TAG, context!!, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME)
        val passthroughShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME)
        programName = GLES20.glCreateProgram()
        GLES20.glAttachShader(programName, vertexShader)
        GLES20.glAttachShader(programName, passthroughShader)
        GLES20.glLinkProgram(programName)
        GLES20.glUseProgram(programName)
        ShaderUtil.checkGLError(TAG, "program")
        positionAttribute = GLES20.glGetAttribLocation(programName, "a_Position")
        colorUniform = GLES20.glGetUniformLocation(programName, "u_Color")
        modelViewProjectionUniform = GLES20.glGetUniformLocation(programName, "u_ModelViewProjection")
        pointSizeUniform = GLES20.glGetUniformLocation(programName, "u_PointSize")
        ShaderUtil.checkGLError(TAG, "program params")
    }

    /**
     * Updates the OpenGL buffer contents to the provided point. Repeated calls with the same point
     * cloud will be ignored.
     */
    fun update(cloud: PointCloud) {
        if (cloud.timestamp == lastTimestamp) {
            // Redundant call.
            return
        }
        ShaderUtil.checkGLError(TAG, "before update")
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo)
        lastTimestamp = cloud.timestamp

        // If the VBO is not large enough to fit the new point cloud, resize it.
        // Doubling keeps reallocations amortized; requires vboSize > 0 (set in createOnGlThread).
        numPoints = cloud.points.remaining() / FLOATS_PER_POINT
        if (numPoints * BYTES_PER_POINT > vboSize) {
            while (numPoints * BYTES_PER_POINT > vboSize) {
                vboSize *= 2
            }
            GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW)
        }
        GLES20.glBufferSubData(
            GLES20.GL_ARRAY_BUFFER, 0, numPoints * BYTES_PER_POINT, cloud.points)
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0)
        ShaderUtil.checkGLError(TAG, "after update")
    }

    /**
     * Renders the point cloud. ARCore point cloud is given in world space.
     *
     * @param cameraView the camera view matrix for this frame, typically from
     *   [com.google.ar.core.Camera.getViewMatrix].
     * @param cameraPerspective the camera projection matrix for this frame, typically from
     *   [com.google.ar.core.Camera.getProjectionMatrix].
     */
    fun draw(cameraView: FloatArray?, cameraPerspective: FloatArray?) {
        val modelViewProjection = FloatArray(16)
        Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, cameraView, 0)
        ShaderUtil.checkGLError(TAG, "Before draw")
        GLES20.glUseProgram(programName)
        GLES20.glEnableVertexAttribArray(positionAttribute)
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo)
        GLES20.glVertexAttribPointer(positionAttribute, 4, GLES20.GL_FLOAT, false, BYTES_PER_POINT, 0)
        GLES20.glUniform4f(colorUniform, 31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f)
        GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjection, 0)
        GLES20.glUniform1f(pointSizeUniform, 5.0f)
        GLES20.glDrawArrays(GLES20.GL_POINTS, 0, numPoints)
        GLES20.glDisableVertexAttribArray(positionAttribute)
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0)
        ShaderUtil.checkGLError(TAG, "Draw")
    }

    companion object {
        // Fix: previously derived from PointCloud::class, which produced a misleading
        // "PointCloud" log tag for this renderer's messages.
        private val TAG = PointCloudRenderer::class.java.simpleName

        // Shader names.
        private const val VERTEX_SHADER_NAME = "shaders/point_cloud.vert"
        private const val FRAGMENT_SHADER_NAME = "shaders/point_cloud.frag"
        private const val BYTES_PER_FLOAT = Float.SIZE_BYTES
        private const val FLOATS_PER_POINT = 4 // X,Y,Z,confidence.
        private const val BYTES_PER_POINT = BYTES_PER_FLOAT * FLOATS_PER_POINT
        private const val INITIAL_BUFFER_POINTS = 1000
    }
}
--------------------------------------------------------------------------------
/app/src/main/assets/shaders/ar_object.frag:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | precision mediump float;
18 |
19 | uniform sampler2D u_Texture;
20 |
21 | uniform vec4 u_LightingParameters;
22 | uniform vec4 u_MaterialParameters;
23 | uniform vec4 u_ColorCorrectionParameters;
24 |
25 | #if USE_DEPTH_FOR_OCCLUSION
26 | uniform sampler2D u_DepthTexture;
27 | uniform mat3 u_DepthUvTransform;
28 | uniform float u_DepthAspectRatio;
29 | #endif // USE_DEPTH_FOR_OCCLUSION
30 |
31 | varying vec3 v_ViewPosition;
32 | varying vec3 v_ViewNormal;
33 | varying vec2 v_TexCoord;
34 | varying vec3 v_ScreenSpacePosition;
35 | uniform vec4 u_ObjColor;
36 |
37 | #if USE_DEPTH_FOR_OCCLUSION
38 |
// Decodes a depth value, in millimeters, from a sample of the depth texture.
float DepthGetMillimeters(in sampler2D depth_texture, in vec2 depth_uv) {
  // Depth is packed into the red and green components of its texture.
  // The texture is a normalized format, storing millimeters.
  // Low byte in .x, high byte in .y: depth_mm = x * 255 + y * 65280.
  vec3 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).xyz;
  return dot(packedDepthAndVisibility.xy, vec2(255.0, 256.0 * 255.0));
}
45 |
// Returns linear interpolation position of value between min and max bounds,
// clamped to [0.0, 1.0].
// E.g., DepthInverseLerp(1100, 1000, 2000) returns 0.1.
float DepthInverseLerp(in float value, in float min_bound, in float max_bound) {
  return clamp((value - min_bound) / (max_bound - min_bound), 0.0, 1.0);
}
51 |
// Returns a value between 0.0 (not visible) and 1.0 (completely visible)
// Which represents how visible or occluded is the pixel in relation to the
// depth map.
float DepthGetVisibility(in sampler2D depth_texture, in vec2 depth_uv,
                         in float asset_depth_mm) {
  float depth_mm = DepthGetMillimeters(depth_texture, depth_uv);

  // Instead of a hard z-buffer test, allow the asset to fade into the
  // background along a 2 * kDepthTolerancePerMm * asset_depth_mm
  // range centered on the background depth.
  const float kDepthTolerancePerMm = 0.015;
  float visibility_occlusion = clamp(0.5 * (depth_mm - asset_depth_mm) /
    (kDepthTolerancePerMm * asset_depth_mm) + 0.5, 0.0, 1.0);

  // Depth close to zero is most likely invalid, do not use it for occlusions.
  float visibility_depth_near = 1.0 - DepthInverseLerp(
      depth_mm, /*min_depth_mm=*/150.0, /*max_depth_mm=*/200.0);

  // Same for very high depth values.
  float visibility_depth_far = DepthInverseLerp(
      depth_mm, /*min_depth_mm=*/7500.0, /*max_depth_mm=*/8000.0);

  // kOcclusionAlpha is the floor for occluded fragments; 0.0 hides them fully.
  const float kOcclusionAlpha = 0.0;
  float visibility =
      max(max(visibility_occlusion, kOcclusionAlpha),
          max(visibility_depth_near, visibility_depth_far));

  return visibility;
}
81 |
// Computes a weighted average of DepthGetVisibility over a 5x5 neighborhood of
// uv, softening the occlusion boundary between real and virtual geometry.
float DepthGetBlurredVisibilityAroundUV(in sampler2D depth_texture, in vec2 uv,
                                        in float asset_depth_mm) {
  // Kernel used:
  // 0   4   7   4   0
  // 4   16  26  16  4
  // 7   26  41  26  7
  // 4   16  26  16  4
  // 0   4   7   4   0
  const float kKernelTotalWeights = 269.0;
  float sum = 0.0;

  // Tap spacing; the y component compensates for the depth image aspect ratio.
  const float kOcclusionBlurAmount = 0.01;
  vec2 blurriness = vec2(kOcclusionBlurAmount,
                         kOcclusionBlurAmount * u_DepthAspectRatio);

  float current = 0.0;

  // Weight-4 taps (the zero-weight corners of the kernel are skipped entirely).
  current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, -2.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, -2.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, +2.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, +2.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-2.0, +1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+2.0, +1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-2.0, -1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+2.0, -1.0) * blurriness, asset_depth_mm);
  sum += current * 4.0;

  // Weight-7 taps.
  current = 0.0;
  current += DepthGetVisibility(depth_texture, uv + vec2(-2.0, -0.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+2.0, +0.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+0.0, +2.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-0.0, -2.0) * blurriness, asset_depth_mm);
  sum += current * 7.0;

  // Weight-16 taps.
  current = 0.0;
  current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, -1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, -1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, +1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, +1.0) * blurriness, asset_depth_mm);
  sum += current * 16.0;

  // Weight-26 taps.
  current = 0.0;
  current += DepthGetVisibility(depth_texture, uv + vec2(+0.0, +1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-0.0, -1.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, -0.0) * blurriness, asset_depth_mm);
  current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, +0.0) * blurriness, asset_depth_mm);
  sum += current * 26.0;

  // Center tap, weight 41.
  sum += DepthGetVisibility(depth_texture, uv , asset_depth_mm) * 41.0;

  return sum / kKernelTotalWeights;
}
134 |
135 | #endif // USE_DEPTH_FOR_OCCLUSION
136 |
void main() {
  // We support approximate sRGB gamma.
  const float kGamma = 0.4545454;  // ~= 1.0 / 2.2
  const float kInverseGamma = 2.2;
  const float kMiddleGrayGamma = 0.466;

  // Unpack lighting and material parameters for better naming.
  vec3 viewLightDirection = u_LightingParameters.xyz;
  vec3 colorShift = u_ColorCorrectionParameters.rgb;
  float averagePixelIntensity = u_ColorCorrectionParameters.a;

  float materialAmbient = u_MaterialParameters.x;
  float materialDiffuse = u_MaterialParameters.y;
  float materialSpecular = u_MaterialParameters.z;
  float materialSpecularPower = u_MaterialParameters.w;

  // Normalize varying parameters, because they are linearly interpolated in the vertex shader.
  vec3 viewFragmentDirection = normalize(v_ViewPosition);
  vec3 viewNormal = normalize(v_ViewNormal);

  // Flip the y-texture coordinate to address the texture from top-left.
  vec4 objectColor = texture2D(u_Texture, vec2(v_TexCoord.x, 1.0 - v_TexCoord.y));

  // Apply color to grayscale image only if the alpha of u_ObjColor is
  // greater and equal to 255.0.
  objectColor.rgb *= mix(vec3(1.0), u_ObjColor.rgb / 255.0,
                         step(255.0, u_ObjColor.a));

  // Apply inverse SRGB gamma to the texture before making lighting calculations.
  objectColor.rgb = pow(objectColor.rgb, vec3(kInverseGamma));

  // Ambient light is unaffected by the light intensity.
  float ambient = materialAmbient;

  // Approximate a hemisphere light (not a harsh directional light).
  float diffuse = materialDiffuse *
      0.5 * (dot(viewNormal, viewLightDirection) + 1.0);

  // Compute specular light. Textures are loaded with premultiplied alpha
  // (https://developer.android.com/reference/android/graphics/BitmapFactory.Options#inPremultiplied),
  // so premultiply the specular color by alpha as well.
  vec3 reflectedLightDirection = reflect(viewLightDirection, viewNormal);
  float specularStrength = max(0.0, dot(viewFragmentDirection, reflectedLightDirection));
  float specular = objectColor.a * materialSpecular *
      pow(specularStrength, materialSpecularPower);

  vec3 color = objectColor.rgb * (ambient + diffuse) + specular;
  // Apply SRGB gamma before writing the fragment color.
  color.rgb = pow(color, vec3(kGamma));
  // Apply average pixel intensity and color shift
  color *= colorShift * (averagePixelIntensity / kMiddleGrayGamma);
  gl_FragColor.rgb = color;
  gl_FragColor.a = objectColor.a;

#if USE_DEPTH_FOR_OCCLUSION
  const float kMetersToMillimeters = 1000.0;
  // View-space z is negative in front of the camera, so negate to get positive depth.
  float asset_depth_mm = v_ViewPosition.z * kMetersToMillimeters * -1.;
  // Computes the texture coordinates to sample from the depth image.
  vec2 depth_uvs = (u_DepthUvTransform * vec3(v_ScreenSpacePosition.xy, 1)).xy;

  // The following step is very costly. Replace the last line with the
  // commented line if it's too expensive.
  // gl_FragColor *= DepthGetVisibility(u_DepthTexture, depth_uvs, asset_depth_mm);
  gl_FragColor *= DepthGetBlurredVisibilityAroundUV(u_DepthTexture, depth_uvs, asset_depth_mm);
#endif // USE_DEPTH_FOR_OCCLUSION
}
203 |
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/rendering/BackgroundRenderer.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.rendering
17 |
18 | import android.content.Context
19 | import android.opengl.GLES11Ext
20 | import android.opengl.GLES20
21 | import com.google.ar.core.Coordinates2d
22 | import com.google.ar.core.Frame
23 | import java.io.IOException
24 | import java.nio.ByteBuffer
25 | import java.nio.ByteOrder
26 | import java.nio.FloatBuffer
27 |
28 | /**
29 | * This class renders the AR background from camera feed. It creates and hosts the texture given to
30 | * ARCore to be filled with the camera image.
31 | */
class BackgroundRenderer {
    private var quadCoords: FloatBuffer? = null
    private var quadTexCoords: FloatBuffer? = null
    private var cameraProgram = 0
    private var depthProgram = 0
    private var cameraPositionAttrib = 0
    private var cameraTexCoordAttrib = 0
    private var cameraTextureUniform = 0

    /** OpenGL texture id handed to ARCore to fill with the camera image; -1 until [createOnGlThread] runs. */
    var textureId = -1
        private set

    private var suppressTimestampZeroRendering = true
    private var depthPositionAttrib = 0
    private var depthTexCoordAttrib = 0
    private var depthTextureUniform = 0
    private var depthTextureId = -1

    /**
     * Allocates and initializes OpenGL resources needed by the background renderer. Must be called on
     * the OpenGL thread, typically in [GLSurfaceView.Renderer.onSurfaceCreated].
     *
     * @param context Needed to access shader source; must not be null.
     * @param depthTextureId Texture id of the depth image to visualize, or -1 when depth is unused.
     * @throws IOException If a shader asset cannot be read.
     */
    @JvmOverloads
    @Throws(IOException::class)
    fun createOnGlThread(context: Context?, depthTextureId: Int = /*depthTextureId=*/-1) {
        // Fail fast with a clear message instead of repeated `!!` assertions below.
        val ctx = requireNotNull(context) { "context must not be null" }

        // Generate the background texture.
        val textures = IntArray(1)
        GLES20.glGenTextures(1, textures, 0)
        textureId = textures[0]
        val textureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES
        GLES20.glBindTexture(textureTarget, textureId)
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)

        val numVertices = 4
        // Guard against the quad geometry constant drifting out of sync with the vertex count.
        check(numVertices == QUAD_COORDS.size / COORDS_PER_VERTEX) {
            "Unexpected number of vertices in BackgroundRenderer."
        }

        val bbCoords = ByteBuffer.allocateDirect(QUAD_COORDS.size * FLOAT_SIZE)
        bbCoords.order(ByteOrder.nativeOrder())
        quadCoords = bbCoords.asFloatBuffer().apply {
            put(QUAD_COORDS)
            position(0)
        }

        val bbTexCoordsTransformed =
            ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE)
        bbTexCoordsTransformed.order(ByteOrder.nativeOrder())
        quadTexCoords = bbTexCoordsTransformed.asFloatBuffer()

        // Load render camera feed shader.
        run {
            val vertexShader =
                ShaderUtil.loadGLShader(TAG, ctx, GLES20.GL_VERTEX_SHADER, CAMERA_VERTEX_SHADER_NAME)
            val fragmentShader =
                ShaderUtil.loadGLShader(TAG, ctx, GLES20.GL_FRAGMENT_SHADER, CAMERA_FRAGMENT_SHADER_NAME)
            cameraProgram = GLES20.glCreateProgram()
            GLES20.glAttachShader(cameraProgram, vertexShader)
            GLES20.glAttachShader(cameraProgram, fragmentShader)
            GLES20.glLinkProgram(cameraProgram)
            GLES20.glUseProgram(cameraProgram)
            cameraPositionAttrib = GLES20.glGetAttribLocation(cameraProgram, "a_Position")
            cameraTexCoordAttrib = GLES20.glGetAttribLocation(cameraProgram, "a_TexCoord")
            ShaderUtil.checkGLError(TAG, "Program creation")
            cameraTextureUniform = GLES20.glGetUniformLocation(cameraProgram, "sTexture")
            ShaderUtil.checkGLError(TAG, "Program parameters")
        }

        // Load render depth map shader.
        run {
            val vertexShader = ShaderUtil.loadGLShader(
                TAG, ctx, GLES20.GL_VERTEX_SHADER, DEPTH_VISUALIZER_VERTEX_SHADER_NAME)
            val fragmentShader = ShaderUtil.loadGLShader(
                TAG, ctx, GLES20.GL_FRAGMENT_SHADER, DEPTH_VISUALIZER_FRAGMENT_SHADER_NAME)
            depthProgram = GLES20.glCreateProgram()
            GLES20.glAttachShader(depthProgram, vertexShader)
            GLES20.glAttachShader(depthProgram, fragmentShader)
            GLES20.glLinkProgram(depthProgram)
            GLES20.glUseProgram(depthProgram)
            depthPositionAttrib = GLES20.glGetAttribLocation(depthProgram, "a_Position")
            depthTexCoordAttrib = GLES20.glGetAttribLocation(depthProgram, "a_TexCoord")
            ShaderUtil.checkGLError(TAG, "Program creation")
            depthTextureUniform = GLES20.glGetUniformLocation(depthProgram, "u_DepthTexture")
            ShaderUtil.checkGLError(TAG, "Program parameters")
        }
        this.depthTextureId = depthTextureId
    }

    /** Controls whether [draw] is suppressed while the camera has not yet produced a frame. */
    fun suppressTimestampZeroRendering(suppressTimestampZeroRendering: Boolean) {
        this.suppressTimestampZeroRendering = suppressTimestampZeroRendering
    }

    /**
     * Draws the AR background image. The image will be drawn such that virtual content rendered with
     * the matrices provided by [com.google.ar.core.Camera.getViewMatrix] and
     * [com.google.ar.core.Camera.getProjectionMatrix] will
     * accurately follow static physical objects. This must be called **before** drawing virtual
     * content.
     *
     * @param frame The current `Frame` as returned by [Session.update].
     * @param debugShowDepthMap Toggles whether to show the live camera feed or latest depth image.
     */
    @JvmOverloads
    fun draw(frame: Frame, debugShowDepthMap: Boolean = /*debugShowDepthMap=*/false) {
        // If display rotation changed (also includes view size change), we need to re-query the uv
        // coordinates for the screen rect, as they may have changed as well.
        if (frame.hasDisplayGeometryChanged()) {
            frame.transformCoordinates2d(
                Coordinates2d.OPENGL_NORMALIZED_DEVICE_COORDINATES,
                quadCoords,
                Coordinates2d.TEXTURE_NORMALIZED,
                quadTexCoords)
        }
        if (frame.timestamp == 0L && suppressTimestampZeroRendering) {
            // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
            // drawing possible leftover data from previous sessions if the texture is reused.
            return
        }
        draw(debugShowDepthMap)
    }

    /**
     * Draws the camera image using the currently configured [BackgroundRenderer.quadTexCoords]
     * image texture coordinates.
     *
     * The image will be center cropped if the camera sensor aspect ratio does not match the screen
     * aspect ratio, which matches the cropping behavior of [Frame.transformCoordinates2d].
     *
     * @param imageWidth Camera image width in pixels.
     * @param imageHeight Camera image height in pixels.
     * @param screenAspectRatio Width/height ratio of the output screen.
     * @param cameraToDisplayRotation Rotation in degrees; must be 0, 90, 180 or 270.
     */
    fun draw(
        imageWidth: Int, imageHeight: Int, screenAspectRatio: Float, cameraToDisplayRotation: Int) {
        // Crop the camera image to fit the screen aspect ratio.
        val imageAspectRatio = imageWidth.toFloat() / imageHeight
        val croppedWidth: Float
        val croppedHeight: Float
        if (screenAspectRatio < imageAspectRatio) {
            croppedWidth = imageHeight * screenAspectRatio
            croppedHeight = imageHeight.toFloat()
        } else {
            croppedWidth = imageWidth.toFloat()
            croppedHeight = imageWidth / screenAspectRatio
        }
        // Normalized crop insets, measured from each edge toward the center.
        val u = (imageWidth - croppedWidth) / imageWidth * 0.5f
        val v = (imageHeight - croppedHeight) / imageHeight * 0.5f
        // Assign the `when` result directly instead of a split declaration/assignment.
        val texCoordTransformed = when (cameraToDisplayRotation) {
            90 -> floatArrayOf(1 - u, 1 - v, 1 - u, v, u, 1 - v, u, v)
            180 -> floatArrayOf(1 - u, v, u, v, 1 - u, 1 - v, u, 1 - v)
            270 -> floatArrayOf(u, v, u, 1 - v, 1 - u, v, 1 - u, 1 - v)
            0 -> floatArrayOf(u, 1 - v, 1 - u, 1 - v, u, v, 1 - u, v)
            else -> throw IllegalArgumentException("Unhandled rotation: $cameraToDisplayRotation")
        }

        // Write image texture coordinates.
        quadTexCoords!!.position(0)
        quadTexCoords!!.put(texCoordTransformed)
        draw( /*debugShowDepthMap=*/false)
    }

    /**
     * Draws the camera background image (or the depth visualization) using the currently configured
     * [BackgroundRenderer.quadTexCoords] image texture coordinates.
     */
    private fun draw(debugShowDepthMap: Boolean) {
        // Ensure position is rewound before use.
        quadTexCoords!!.position(0)

        // No need to test or write depth, the screen quad has arbitrary depth, and is expected
        // to be drawn first.
        GLES20.glDisable(GLES20.GL_DEPTH_TEST)
        GLES20.glDepthMask(false)
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        if (debugShowDepthMap) {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, depthTextureId)
            GLES20.glUseProgram(depthProgram)
            GLES20.glUniform1i(depthTextureUniform, 0)

            // Set the vertex positions and texture coordinates.
            GLES20.glVertexAttribPointer(
                depthPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadCoords)
            GLES20.glVertexAttribPointer(
                depthTexCoordAttrib, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadTexCoords)
            GLES20.glEnableVertexAttribArray(depthPositionAttrib)
            GLES20.glEnableVertexAttribArray(depthTexCoordAttrib)
        } else {
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId)
            GLES20.glUseProgram(cameraProgram)
            GLES20.glUniform1i(cameraTextureUniform, 0)

            // Set the vertex positions and texture coordinates.
            GLES20.glVertexAttribPointer(
                cameraPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadCoords)
            GLES20.glVertexAttribPointer(
                cameraTexCoordAttrib, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadTexCoords)
            GLES20.glEnableVertexAttribArray(cameraPositionAttrib)
            GLES20.glEnableVertexAttribArray(cameraTexCoordAttrib)
        }
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)

        // Disable vertex arrays
        if (debugShowDepthMap) {
            GLES20.glDisableVertexAttribArray(depthPositionAttrib)
            GLES20.glDisableVertexAttribArray(depthTexCoordAttrib)
        } else {
            GLES20.glDisableVertexAttribArray(cameraPositionAttrib)
            GLES20.glDisableVertexAttribArray(cameraTexCoordAttrib)
        }

        // Restore the depth state for further drawing.
        GLES20.glDepthMask(true)
        GLES20.glEnable(GLES20.GL_DEPTH_TEST)
        ShaderUtil.checkGLError(TAG, "BackgroundRendererDraw")
    }

    companion object {
        private val TAG = BackgroundRenderer::class.java.simpleName

        // Shader names.
        private const val CAMERA_VERTEX_SHADER_NAME = "shaders/screenquad.vert"
        private const val CAMERA_FRAGMENT_SHADER_NAME = "shaders/screenquad.frag"
        private const val DEPTH_VISUALIZER_VERTEX_SHADER_NAME = "shaders/background_show_depth_color_visualization.vert"
        private const val DEPTH_VISUALIZER_FRAGMENT_SHADER_NAME = "shaders/background_show_depth_color_visualization.frag"
        private const val COORDS_PER_VERTEX = 2
        private const val TEXCOORDS_PER_VERTEX = 2
        private const val FLOAT_SIZE = 4

        /**
         * (-1, 1) ------- (1, 1)
         *   |    \           |
         *   |       \        |
         *   |          \     |
         *   |             \  |
         * (-1, -1) ------ (1, -1)
         * Ensure triangles are front-facing, to support glCullFace().
         * This quad will be drawn using GL_TRIANGLE_STRIP which draws two
         * triangles: v0->v1->v2, then v2->v1->v3.
         */
        private val QUAD_COORDS = floatArrayOf(
            -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f)
    }
}
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/rendering/PlaneRenderer.java:
--------------------------------------------------------------------------------
1 | package com.google.ar.core.examples.kotlin.common.rendering;
2 |
3 |
4 |
5 | import android.content.Context;
6 | import android.graphics.Bitmap;
7 | import android.graphics.BitmapFactory;
8 | import android.opengl.GLES20;
9 | import android.opengl.GLUtils;
10 | import android.opengl.Matrix;
11 | import com.google.ar.core.Camera;
12 | import com.google.ar.core.Plane;
13 | import com.google.ar.core.Pose;
14 | import com.google.ar.core.TrackingState;
15 | import java.io.IOException;
16 | import java.nio.ByteBuffer;
17 | import java.nio.ByteOrder;
18 | import java.nio.FloatBuffer;
19 | import java.nio.ShortBuffer;
20 | import java.util.ArrayList;
21 | import java.util.Collection;
22 | import java.util.Collections;
23 | import java.util.Comparator;
24 | import java.util.HashMap;
25 | import java.util.List;
26 | import java.util.Map;
27 |
28 | /** Renders the detected AR planes. */
29 | public class PlaneRenderer {
30 | private static final String TAG = PlaneRenderer.class.getSimpleName();
31 |
32 | // Shader names.
33 | private static final String VERTEX_SHADER_NAME = "shaders/plane.vert";
34 | private static final String FRAGMENT_SHADER_NAME = "shaders/plane.frag";
35 |
36 | private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
37 | private static final int BYTES_PER_SHORT = Short.SIZE / 8;
38 | private static final int COORDS_PER_VERTEX = 3; // x, z, alpha
39 |
40 | private static final int VERTS_PER_BOUNDARY_VERT = 2;
41 | private static final int INDICES_PER_BOUNDARY_VERT = 3;
42 | private static final int INITIAL_BUFFER_BOUNDARY_VERTS = 64;
43 |
44 | private static final int INITIAL_VERTEX_BUFFER_SIZE_BYTES =
45 | BYTES_PER_FLOAT * COORDS_PER_VERTEX * VERTS_PER_BOUNDARY_VERT * INITIAL_BUFFER_BOUNDARY_VERTS;
46 |
47 | private static final int INITIAL_INDEX_BUFFER_SIZE_BYTES =
48 | BYTES_PER_SHORT
49 | * INDICES_PER_BOUNDARY_VERT
50 | * INDICES_PER_BOUNDARY_VERT
51 | * INITIAL_BUFFER_BOUNDARY_VERTS;
52 |
53 | private static final float FADE_RADIUS_M = 0.25f;
54 | private static final float DOTS_PER_METER = 10.0f;
55 | private static final float EQUILATERAL_TRIANGLE_SCALE = (float) (1 / Math.sqrt(3));
56 |
57 | // Using the "signed distance field" approach to render sharp lines and circles.
58 | // {dotThreshold, lineThreshold, lineFadeSpeed, occlusionScale}
59 | // dotThreshold/lineThreshold: red/green intensity above which dots/lines are present
60 | // lineFadeShrink: lines will fade in between alpha = 1-(1/lineFadeShrink) and 1.0
61 | // occlusionShrink: occluded planes will fade out between alpha = 0 and 1/occlusionShrink
62 | private static final float[] GRID_CONTROL = {0.2f, 0.4f, 2.0f, 1.5f};
63 |
64 | private int planeProgram;
65 | private final int[] textures = new int[1];
66 |
67 | private int planeXZPositionAlphaAttribute;
68 |
69 | private int planeModelUniform;
70 | private int planeNormalUniform;
71 | private int planeModelViewProjectionUniform;
72 | private int textureUniform;
73 | private int gridControlUniform;
74 | private int planeUvMatrixUniform;
75 |
76 | private FloatBuffer vertexBuffer =
77 | ByteBuffer.allocateDirect(INITIAL_VERTEX_BUFFER_SIZE_BYTES)
78 | .order(ByteOrder.nativeOrder())
79 | .asFloatBuffer();
80 | private ShortBuffer indexBuffer =
81 | ByteBuffer.allocateDirect(INITIAL_INDEX_BUFFER_SIZE_BYTES)
82 | .order(ByteOrder.nativeOrder())
83 | .asShortBuffer();
84 |
85 | // Temporary lists/matrices allocated here to reduce number of allocations for each frame.
86 | private final float[] modelMatrix = new float[16];
87 | private final float[] modelViewMatrix = new float[16];
88 | private final float[] modelViewProjectionMatrix = new float[16];
89 | private final float[] planeAngleUvMatrix =
90 | new float[4]; // 2x2 rotation matrix applied to uv coords.
91 |
92 | private final Map planeIndexMap = new HashMap<>();
93 |
94 | public PlaneRenderer() {}
95 |
96 | /**
97 | * Allocates and initializes OpenGL resources needed by the plane renderer. Must be called on the
98 |
99 | *
100 | * @param context Needed to access shader source and texture PNG.
101 | * @param gridDistanceTextureName Name of the PNG file containing the grid texture.
102 | */
103 | public void createOnGlThread(Context context, String gridDistanceTextureName) throws IOException {
104 | int vertexShader =
105 | ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME);
106 | int passthroughShader =
107 | ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME);
108 |
109 | planeProgram = GLES20.glCreateProgram();
110 | GLES20.glAttachShader(planeProgram, vertexShader);
111 | GLES20.glAttachShader(planeProgram, passthroughShader);
112 | GLES20.glLinkProgram(planeProgram);
113 | GLES20.glUseProgram(planeProgram);
114 |
115 | ShaderUtil.checkGLError(TAG, "Program creation");
116 |
117 | // Read the texture.
118 | Bitmap textureBitmap =
119 | BitmapFactory.decodeStream(context.getAssets().open(gridDistanceTextureName));
120 |
121 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
122 | GLES20.glGenTextures(textures.length, textures, 0);
123 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
124 |
125 | GLES20.glTexParameteri(
126 | GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
127 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
128 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
129 | GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
130 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
131 |
132 | ShaderUtil.checkGLError(TAG, "Texture loading");
133 |
134 | planeXZPositionAlphaAttribute = GLES20.glGetAttribLocation(planeProgram, "a_XZPositionAlpha");
135 |
136 | planeModelUniform = GLES20.glGetUniformLocation(planeProgram, "u_Model");
137 | planeNormalUniform = GLES20.glGetUniformLocation(planeProgram, "u_Normal");
138 | planeModelViewProjectionUniform =
139 | GLES20.glGetUniformLocation(planeProgram, "u_ModelViewProjection");
140 | textureUniform = GLES20.glGetUniformLocation(planeProgram, "u_Texture");
141 | gridControlUniform = GLES20.glGetUniformLocation(planeProgram, "u_gridControl");
142 | planeUvMatrixUniform = GLES20.glGetUniformLocation(planeProgram, "u_PlaneUvMatrix");
143 |
144 | ShaderUtil.checkGLError(TAG, "Program parameters");
145 | }
146 |
147 | /** Updates the plane model transform matrix and extents. */
148 | private void updatePlaneParameters(
149 | float[] planeMatrix, float extentX, float extentZ, FloatBuffer boundary) {
150 | System.arraycopy(planeMatrix, 0, modelMatrix, 0, 16);
151 | if (boundary == null) {
152 | vertexBuffer.limit(0);
153 | indexBuffer.limit(0);
154 | return;
155 | }
156 |
157 | // Generate a new set of vertices and a corresponding triangle strip index set so that
158 | // the plane boundary polygon has a fading edge. This is done by making a copy of the
159 | // boundary polygon vertices and scaling it down around center to push it inwards. Then
160 | // the index buffer is setup accordingly.
161 | boundary.rewind();
162 | int boundaryVertices = boundary.limit() / 2;
163 | int numVertices;
164 | int numIndices;
165 |
166 | numVertices = boundaryVertices * VERTS_PER_BOUNDARY_VERT;
167 | // drawn as GL_TRIANGLE_STRIP with 3n-2 triangles (n-2 for fill, 2n for perimeter).
168 | numIndices = boundaryVertices * INDICES_PER_BOUNDARY_VERT;
169 |
170 | if (vertexBuffer.capacity() < numVertices * COORDS_PER_VERTEX) {
171 | int size = vertexBuffer.capacity();
172 | while (size < numVertices * COORDS_PER_VERTEX) {
173 | size *= 2;
174 | }
175 | vertexBuffer =
176 | ByteBuffer.allocateDirect(BYTES_PER_FLOAT * size)
177 | .order(ByteOrder.nativeOrder())
178 | .asFloatBuffer();
179 | }
180 | vertexBuffer.rewind();
181 | vertexBuffer.limit(numVertices * COORDS_PER_VERTEX);
182 |
183 | if (indexBuffer.capacity() < numIndices) {
184 | int size = indexBuffer.capacity();
185 | while (size < numIndices) {
186 | size *= 2;
187 | }
188 | indexBuffer =
189 | ByteBuffer.allocateDirect(BYTES_PER_SHORT * size)
190 | .order(ByteOrder.nativeOrder())
191 | .asShortBuffer();
192 | }
193 | indexBuffer.rewind();
194 | indexBuffer.limit(numIndices);
195 |
196 | // Note: when either dimension of the bounding box is smaller than 2*FADE_RADIUS_M we
197 | // generate a bunch of 0-area triangles. These don't get rendered though so it works
198 | // out ok.
199 | float xScale = Math.max((extentX - 2 * FADE_RADIUS_M) / extentX, 0.0f);
200 | float zScale = Math.max((extentZ - 2 * FADE_RADIUS_M) / extentZ, 0.0f);
201 |
202 | while (boundary.hasRemaining()) {
203 | float x = boundary.get();
204 | float z = boundary.get();
205 | vertexBuffer.put(x);
206 | vertexBuffer.put(z);
207 | vertexBuffer.put(0.0f);
208 | vertexBuffer.put(x * xScale);
209 | vertexBuffer.put(z * zScale);
210 | vertexBuffer.put(1.0f);
211 | }
212 |
213 | // step 1, perimeter
214 | indexBuffer.put((short) ((boundaryVertices - 1) * 2));
215 | for (int i = 0; i < boundaryVertices; ++i) {
216 | indexBuffer.put((short) (i * 2));
217 | indexBuffer.put((short) (i * 2 + 1));
218 | }
219 | indexBuffer.put((short) 1);
220 | // This leaves us on the interior edge of the perimeter between the inset vertices
221 | // for boundary verts n-1 and 0.
222 |
223 | // step 2, interior:
224 | for (int i = 1; i < boundaryVertices / 2; ++i) {
225 | indexBuffer.put((short) ((boundaryVertices - 1 - i) * 2 + 1));
226 | indexBuffer.put((short) (i * 2 + 1));
227 | }
228 | if (boundaryVertices % 2 != 0) {
229 | indexBuffer.put((short) ((boundaryVertices / 2) * 2 + 1));
230 | }
231 | }
232 |
233 | private void draw(float[] cameraView, float[] cameraPerspective, float[] planeNormal) {
234 | // Build the ModelView and ModelViewProjection matrices
235 | // for calculating cube position and light.
236 | Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0);
237 | Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0);
238 |
239 | // Set the position of the plane
240 | vertexBuffer.rewind();
241 | GLES20.glVertexAttribPointer(
242 | planeXZPositionAlphaAttribute,
243 | COORDS_PER_VERTEX,
244 | GLES20.GL_FLOAT,
245 | false,
246 | BYTES_PER_FLOAT * COORDS_PER_VERTEX,
247 | vertexBuffer);
248 |
249 | // Set the Model and ModelViewProjection matrices in the shader.
250 | GLES20.glUniformMatrix4fv(planeModelUniform, 1, false, modelMatrix, 0);
251 | GLES20.glUniform3f(planeNormalUniform, planeNormal[0], planeNormal[1], planeNormal[2]);
252 | GLES20.glUniformMatrix4fv(
253 | planeModelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0);
254 |
255 | indexBuffer.rewind();
256 | GLES20.glDrawElements(
257 | GLES20.GL_TRIANGLE_STRIP, indexBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, indexBuffer);
258 | ShaderUtil.checkGLError(TAG, "Drawing plane");
259 | }
260 |
261 | static class SortablePlane {
262 | final float distance;
263 | final Plane plane;
264 |
265 | SortablePlane(float distance, Plane plane) {
266 | this.distance = distance;
267 | this.plane = plane;
268 | }
269 | }
270 |
271 | /**
272 | * Draws the collection of tracked planes, with closer planes hiding more distant ones.
273 | *
274 | * @param allPlanes The collection of planes to draw.
275 | * @param cameraPose The pose of the camera, as returned by {@link Camera#getPose()}
276 | * @param cameraPerspective The projection matrix, as returned by {@link
277 | * Camera#getProjectionMatrix(float[], int, float, float)}
278 | */
279 | public void drawPlanes(Collection allPlanes, Pose cameraPose, float[] cameraPerspective) {
280 | // Planes must be sorted by distance from camera so that we draw closer planes first, and
281 | // they occlude the farther planes.
282 | List sortedPlanes = new ArrayList<>();
283 |
284 | for (Plane plane : allPlanes) {
285 | if (plane.getTrackingState() != TrackingState.TRACKING || plane.getSubsumedBy() != null) {
286 | continue;
287 | }
288 |
289 | float distance = calculateDistanceToPlane(plane.getCenterPose(), cameraPose);
290 | if (distance < 0) { // Plane is back-facing.
291 | continue;
292 | }
293 | sortedPlanes.add(new SortablePlane(distance, plane));
294 | }
295 | Collections.sort(
296 | sortedPlanes,
297 | new Comparator() {
298 | @Override
299 | public int compare(SortablePlane a, SortablePlane b) {
300 | return Float.compare(b.distance, a.distance);
301 | }
302 | });
303 |
304 | float[] cameraView = new float[16];
305 | cameraPose.inverse().toMatrix(cameraView, 0);
306 |
307 | // Disable depth write.
308 | GLES20.glDepthMask(false);
309 |
310 | // Normal alpha blending with premultiplied alpha.
311 | GLES20.glEnable(GLES20.GL_BLEND);
312 | GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
313 |
314 | // Set up the shader.
315 | GLES20.glUseProgram(planeProgram);
316 |
317 | // Attach the texture.
318 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
319 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
320 | GLES20.glUniform1i(textureUniform, 0);
321 |
322 | // Shared fragment uniforms.
323 | GLES20.glUniform4fv(gridControlUniform, 1, GRID_CONTROL, 0);
324 |
325 | // Enable vertex arrays
326 | GLES20.glEnableVertexAttribArray(planeXZPositionAlphaAttribute);
327 |
328 | ShaderUtil.checkGLError(TAG, "Setting up to draw planes");
329 |
330 | for (SortablePlane sortedPlane : sortedPlanes) {
331 | Plane plane = sortedPlane.plane;
332 | float[] planeMatrix = new float[16];
333 | plane.getCenterPose().toMatrix(planeMatrix, 0);
334 |
335 | float[] normal = new float[3];
336 | // Get transformed Y axis of plane's coordinate system.
337 | plane.getCenterPose().getTransformedAxis(1, 1.0f, normal, 0);
338 |
339 | updatePlaneParameters(
340 | planeMatrix, plane.getExtentX(), plane.getExtentZ(), plane.getPolygon());
341 |
342 | // Get plane index. Keep a map to assign same indices to same planes.
343 | Integer planeIndex = planeIndexMap.get(plane);
344 | if (planeIndex == null) {
345 | planeIndex = planeIndexMap.size();
346 | planeIndexMap.put(plane, planeIndex);
347 | }
348 |
349 | // Each plane will have its own angle offset from others, to make them easier to
350 | // distinguish. Compute a 2x2 rotation matrix from the angle.
351 | float angleRadians = planeIndex * 0.144f;
352 | float uScale = DOTS_PER_METER;
353 | float vScale = DOTS_PER_METER * EQUILATERAL_TRIANGLE_SCALE;
354 | planeAngleUvMatrix[0] = +(float) Math.cos(angleRadians) * uScale;
355 | planeAngleUvMatrix[1] = -(float) Math.sin(angleRadians) * vScale;
356 | planeAngleUvMatrix[2] = +(float) Math.sin(angleRadians) * uScale;
357 | planeAngleUvMatrix[3] = +(float) Math.cos(angleRadians) * vScale;
358 | GLES20.glUniformMatrix2fv(planeUvMatrixUniform, 1, false, planeAngleUvMatrix, 0);
359 |
360 | draw(cameraView, cameraPerspective, normal);
361 | }
362 |
363 | // Clean up the state we set
364 | GLES20.glDisableVertexAttribArray(planeXZPositionAlphaAttribute);
365 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
366 | GLES20.glDisable(GLES20.GL_BLEND);
367 | GLES20.glDepthMask(true);
368 |
369 | ShaderUtil.checkGLError(TAG, "Cleaning up after drawing planes");
370 | }
371 |
372 | // Calculate the normal distance to plane from cameraPose, the given planePose should have y axis
373 | // parallel to plane's normal, for example plane's center pose or hit test pose.
374 | public static float calculateDistanceToPlane(Pose planePose, Pose cameraPose) {
375 | float[] normal = new float[3];
376 | float cameraX = cameraPose.tx();
377 | float cameraY = cameraPose.ty();
378 | float cameraZ = cameraPose.tz();
379 | // Get transformed Y axis of plane's coordinate system.
380 | planePose.getTransformedAxis(1, 1.0f, normal, 0);
381 | // Compute dot product of plane's normal with vector from camera to plane center.
382 | return (cameraX - planePose.tx()) * normal[0]
383 | + (cameraY - planePose.ty()) * normal[1]
384 | + (cameraZ - planePose.tz()) * normal[2];
385 | }
386 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/common/rendering/ObjectRenderer.kt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.google.ar.core.examples.kotlin.common.rendering
17 |
18 | import android.content.Context
19 | import android.graphics.BitmapFactory
20 | import android.opengl.GLES20
21 | import android.opengl.GLUtils
22 | import android.opengl.Matrix
23 | import de.javagl.obj.ObjData
24 | import de.javagl.obj.ObjReader
25 | import de.javagl.obj.ObjUtils
26 | import java.io.IOException
27 | import java.nio.ByteBuffer
28 | import java.nio.ByteOrder
29 | import java.util.*
30 |
31 | /** Renders an object loaded from an OBJ file in OpenGL. */
32 | class ObjectRenderer {
/**
 * Blending behavior applied when this object is drawn.
 *
 * NOTE(review): the original KDoc referenced `.setBlendMode`, which is not visible in this
 * chunk — presumably a setter for the private `blendMode` field elsewhere in the class;
 * confirm the correct link target.
 */
enum class BlendMode {
    /** Multiplies the destination color by the source alpha, without z-buffer writing. */
    Shadow,

    /** Normal alpha blending with z-buffer writing. */
    AlphaBlending
}
45 |
46 | private val viewLightDirection = FloatArray(4)
47 |
48 | // Object vertex buffer variables.
49 | private var vertexBufferId = 0
50 | private var verticesBaseAddress = 0
51 | private var texCoordsBaseAddress = 0
52 | private var normalsBaseAddress = 0
53 | private var indexBufferId = 0
54 | private var indexCount = 0
55 | private var program = 0
56 | private val textures = IntArray(1)
57 |
58 | // Shader location: model view projection matrix.
59 | private var modelViewUniform = 0
60 | private var modelViewProjectionUniform = 0
61 |
62 | // Shader location: object attributes.
63 | private var positionAttribute = 0
64 | private var normalAttribute = 0
65 | private var texCoordAttribute = 0
66 |
67 | // Shader location: texture sampler.
68 | private var textureUniform = 0
69 |
70 | // Shader location: environment properties.
71 | private var lightingParametersUniform = 0
72 |
73 | // Shader location: material properties.
74 | private var materialParametersUniform = 0
75 |
76 | // Shader location: color correction property.
77 | private var colorCorrectionParameterUniform = 0
78 |
79 | // Shader location: object color property (to change the primary color of the object).
80 | private var colorUniform = 0
81 |
82 | // Shader location: depth texture.
83 | private var depthTextureUniform = 0
84 |
85 | // Shader location: transform to depth uvs.
86 | private var depthUvTransformUniform = 0
87 |
88 | // Shader location: the aspect ratio of the depth texture.
89 | private var depthAspectRatioUniform = 0
90 | private var blendMode: BlendMode? = null
91 |
92 | // Temporary matrices allocated here to reduce number of allocations for each frame.
93 | private val modelMatrix = FloatArray(16)
94 | private val modelViewMatrix = FloatArray(16)
95 | private val modelViewProjectionMatrix = FloatArray(16)
96 |
97 | // Set some default material properties to use for lighting.
98 | private var ambient = 0.3f
99 | private var diffuse = 1.0f
100 | private var specular = 1.0f
101 | private var specularPower = 6.0f
102 | private var useDepthForOcclusion = false
103 | private var depthAspectRatio = 0.0f
104 | private var uvTransform: FloatArray? = null
105 | private var depthTextureId = 0
106 |
107 | /**
108 | * Creates and initializes OpenGL resources needed for rendering the model.
109 | *
110 | * @param context Context for loading the shader and below-named model and texture assets.
111 | * @param objAssetName Name of the OBJ file containing the model geometry.
112 | * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
113 | */
114 | @Throws(IOException::class)
115 | fun createOnGlThread(context: Context, objAssetName: String?, diffuseTextureAssetName: String?) {
116 | // Compiles and loads the shader based on the current configuration.
117 | compileAndLoadShaderProgram(context)
118 |
119 | // Read the texture.
120 | val textureBitmap = BitmapFactory.decodeStream(context.assets.open(diffuseTextureAssetName!!))
121 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
122 | GLES20.glGenTextures(textures.size, textures, 0)
123 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0])
124 | GLES20.glTexParameteri(
125 | GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR)
126 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
127 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0)
128 | GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D)
129 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
130 | textureBitmap.recycle()
131 | ShaderUtil.checkGLError(TAG, "Texture loading")
132 |
133 | // Read the obj file.
134 | val objInputStream = context.assets.open(objAssetName!!)
135 | var obj = ObjReader.read(objInputStream)
136 |
137 | // Prepare the Obj so that its structure is suitable for
138 | // rendering with OpenGL:
139 | // 1. Triangulate it
140 | // 2. Make sure that texture coordinates are not ambiguous
141 | // 3. Make sure that normals are not ambiguous
142 | // 4. Convert it to single-indexed data
143 | obj = ObjUtils.convertToRenderable(obj)
144 |
145 | // OpenGL does not use Java arrays. ByteBuffers are used instead to provide data in a format
146 | // that OpenGL understands.
147 |
148 | // Obtain the data from the OBJ, as direct buffers:
149 | val wideIndices = ObjData.getFaceVertexIndices(obj, 3)
150 | val vertices = ObjData.getVertices(obj)
151 | val texCoords = ObjData.getTexCoords(obj, 2)
152 | val normals = ObjData.getNormals(obj)
153 |
154 | // Convert int indices to shorts for GL ES 2.0 compatibility
155 | val indices = ByteBuffer.allocateDirect(2 * wideIndices.limit())
156 | .order(ByteOrder.nativeOrder())
157 | .asShortBuffer()
158 | while (wideIndices.hasRemaining()) {
159 | indices.put(wideIndices.get().toShort())
160 | }
161 | indices.rewind()
162 | val buffers = IntArray(2)
163 | GLES20.glGenBuffers(2, buffers, 0)
164 | vertexBufferId = buffers[0]
165 | indexBufferId = buffers[1]
166 |
167 | // Load vertex buffer
168 | verticesBaseAddress = 0
169 | texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit()
170 | normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit()
171 | val totalBytes = normalsBaseAddress + 4 * normals.limit()
172 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId)
173 | GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW)
174 | GLES20.glBufferSubData(
175 | GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices)
176 | GLES20.glBufferSubData(
177 | GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords)
178 | GLES20.glBufferSubData(
179 | GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals)
180 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0)
181 |
182 | // Load index buffer
183 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId)
184 | indexCount = indices.limit()
185 | GLES20.glBufferData(
186 | GLES20.GL_ELEMENT_ARRAY_BUFFER, 2 * indexCount, indices, GLES20.GL_STATIC_DRAW)
187 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0)
188 | ShaderUtil.checkGLError(TAG, "OBJ buffer load")
189 | Matrix.setIdentityM(modelMatrix, 0)
190 | }
191 |
192 | /**
193 | * Selects the blending mode for rendering.
194 | *
195 | * @param blendMode The blending mode. Null indicates no blending (opaque rendering).
196 | */
197 | fun setBlendMode(blendMode: BlendMode?) {
198 | this.blendMode = blendMode
199 | }
200 |
201 | /**
202 | * Specifies whether to use the depth texture to perform depth-based occlusion of virtual objects
203 | * from real-world geometry.
204 | *
205 | *
206 | * This function is a no-op if the value provided is the same as what is already set. If the
207 | * value changes, this function will recompile and reload the shader program to either
208 | * enable/disable depth-based occlusion. NOTE: recompilation of the shader is inefficient. This
209 | * code could be optimized to precompile both versions of the shader.
210 | *
211 | * @param context Context for loading the shader.
212 | * @param useDepthForOcclusion Specifies whether to use the depth texture to perform occlusion
213 | * during rendering of virtual objects.
214 | */
215 | @Throws(IOException::class)
216 | fun setUseDepthForOcclusion(context: Context, useDepthForOcclusion: Boolean) {
217 | if (this.useDepthForOcclusion == useDepthForOcclusion) {
218 | return // No change, does nothing.
219 | }
220 |
221 | // Toggles the occlusion rendering mode and recompiles the shader.
222 | this.useDepthForOcclusion = useDepthForOcclusion
223 | compileAndLoadShaderProgram(context)
224 | }
225 |
226 | @Throws(IOException::class)
227 | private fun compileAndLoadShaderProgram(context: Context) {
228 | // Compiles and loads the shader program based on the selected mode.
229 | val defineValuesMap: MutableMap = TreeMap()
230 | defineValuesMap[USE_DEPTH_FOR_OCCLUSION_SHADER_FLAG] = if (useDepthForOcclusion) 1 else 0
231 | val vertexShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME)
232 | val fragmentShader = ShaderUtil.loadGLShader(
233 | TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME, defineValuesMap)
234 | program = GLES20.glCreateProgram()
235 | GLES20.glAttachShader(program, vertexShader)
236 | GLES20.glAttachShader(program, fragmentShader)
237 | GLES20.glLinkProgram(program)
238 | GLES20.glUseProgram(program)
239 | ShaderUtil.checkGLError(TAG, "Program creation")
240 | modelViewUniform = GLES20.glGetUniformLocation(program, "u_ModelView")
241 | modelViewProjectionUniform = GLES20.glGetUniformLocation(program, "u_ModelViewProjection")
242 | positionAttribute = GLES20.glGetAttribLocation(program, "a_Position")
243 | normalAttribute = GLES20.glGetAttribLocation(program, "a_Normal")
244 | texCoordAttribute = GLES20.glGetAttribLocation(program, "a_TexCoord")
245 | textureUniform = GLES20.glGetUniformLocation(program, "u_Texture")
246 | lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters")
247 | materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters")
248 | colorCorrectionParameterUniform = GLES20.glGetUniformLocation(program, "u_ColorCorrectionParameters")
249 | colorUniform = GLES20.glGetUniformLocation(program, "u_ObjColor")
250 |
251 | // Occlusion Uniforms.
252 | if (useDepthForOcclusion) {
253 | depthTextureUniform = GLES20.glGetUniformLocation(program, "u_DepthTexture")
254 | depthUvTransformUniform = GLES20.glGetUniformLocation(program, "u_DepthUvTransform")
255 | depthAspectRatioUniform = GLES20.glGetUniformLocation(program, "u_DepthAspectRatio")
256 | }
257 | ShaderUtil.checkGLError(TAG, "Program parameters")
258 | }
259 |
260 | /**
261 | * Updates the object model matrix and applies scaling.
262 | *
263 | * @param modelMatrix A 4x4 model-to-world transformation matrix, stored in column-major order.
264 | * @param scaleFactor A separate scaling factor to apply before the `modelMatrix`.
265 | * @see android.opengl.Matrix
266 | */
267 | fun updateModelMatrix(modelMatrix: FloatArray?, scaleFactor: Float) {
268 | val scaleMatrix = FloatArray(16)
269 | Matrix.setIdentityM(scaleMatrix, 0)
270 | scaleMatrix[0] = scaleFactor
271 | scaleMatrix[5] = scaleFactor
272 | scaleMatrix[10] = scaleFactor
273 | Matrix.multiplyMM(this.modelMatrix, 0, modelMatrix, 0, scaleMatrix, 0)
274 | }
275 |
276 | /**
277 | * Sets the surface characteristics of the rendered model.
278 | *
279 | * @param ambient Intensity of non-directional surface illumination.
280 | * @param diffuse Diffuse (matte) surface reflectivity.
281 | * @param specular Specular (shiny) surface reflectivity.
282 | * @param specularPower Surface shininess. Larger values result in a smaller, sharper specular
283 | * highlight.
284 | */
285 | fun setMaterialProperties(
286 | ambient: Float, diffuse: Float, specular: Float, specularPower: Float) {
287 | this.ambient = ambient
288 | this.diffuse = diffuse
289 | this.specular = specular
290 | this.specularPower = specularPower
291 | }
292 |
293 | /**
294 | * Draws the model.
295 | *
296 | * @param cameraView A 4x4 view matrix, in column-major order.
297 | * @param cameraPerspective A 4x4 projection matrix, in column-major order.
298 | * @param colorCorrectionRgba Illumination intensity. Combined with diffuse and specular material
299 | * properties.
300 | * @see .setBlendMode
301 | * @see .updateModelMatrix
302 | * @see .setMaterialProperties
303 | * @see android.opengl.Matrix
304 | */
305 | @JvmOverloads
306 | fun draw(
307 | cameraView: FloatArray?,
308 | cameraPerspective: FloatArray?,
309 | colorCorrectionRgba: FloatArray?,
310 | objColor: FloatArray? = DEFAULT_COLOR) {
311 | ShaderUtil.checkGLError(TAG, "Before draw")
312 |
313 | // Build the ModelView and ModelViewProjection matrices
314 | // for calculating object position and light.
315 | Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0)
316 | Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0)
317 | GLES20.glUseProgram(program)
318 |
319 | // Set the lighting environment properties.
320 | Matrix.multiplyMV(viewLightDirection, 0, modelViewMatrix, 0, LIGHT_DIRECTION, 0)
321 | normalizeVec3(viewLightDirection)
322 | GLES20.glUniform4f(
323 | lightingParametersUniform,
324 | viewLightDirection[0],
325 | viewLightDirection[1],
326 | viewLightDirection[2],
327 | 1f)
328 | GLES20.glUniform4fv(colorCorrectionParameterUniform, 1, colorCorrectionRgba, 0)
329 |
330 | // Set the object color property.
331 | GLES20.glUniform4fv(colorUniform, 1, objColor, 0)
332 |
333 | // Set the object material properties.
334 | GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower)
335 |
336 | // Attach the object texture.
337 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
338 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0])
339 | GLES20.glUniform1i(textureUniform, 0)
340 |
341 | // Occlusion parameters.
342 | if (useDepthForOcclusion) {
343 | // Attach the depth texture.
344 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1)
345 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, depthTextureId)
346 | GLES20.glUniform1i(depthTextureUniform, 1)
347 |
348 | // Set the depth texture uv transform.
349 | GLES20.glUniformMatrix3fv(depthUvTransformUniform, 1, false, uvTransform, 0)
350 | GLES20.glUniform1f(depthAspectRatioUniform, depthAspectRatio)
351 | }
352 |
353 | // Set the vertex attributes.
354 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId)
355 | GLES20.glVertexAttribPointer(
356 | positionAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, verticesBaseAddress)
357 | GLES20.glVertexAttribPointer(normalAttribute, 3, GLES20.GL_FLOAT, false, 0, normalsBaseAddress)
358 | GLES20.glVertexAttribPointer(
359 | texCoordAttribute, 2, GLES20.GL_FLOAT, false, 0, texCoordsBaseAddress)
360 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0)
361 |
362 | // Set the ModelViewProjection matrix in the shader.
363 | GLES20.glUniformMatrix4fv(modelViewUniform, 1, false, modelViewMatrix, 0)
364 | GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0)
365 |
366 | // Enable vertex arrays
367 | GLES20.glEnableVertexAttribArray(positionAttribute)
368 | GLES20.glEnableVertexAttribArray(normalAttribute)
369 | GLES20.glEnableVertexAttribArray(texCoordAttribute)
370 | if (blendMode != null) {
371 | GLES20.glEnable(GLES20.GL_BLEND)
372 | when (blendMode) {
373 | BlendMode.Shadow -> {
374 | // Multiplicative blending function for Shadow.
375 | GLES20.glDepthMask(false)
376 | GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA)
377 | }
378 | BlendMode.AlphaBlending -> {
379 | // Alpha blending function, with the depth mask enabled.
380 | GLES20.glDepthMask(true)
381 |
382 | // Textures are loaded with premultiplied alpha
383 | // (https://developer.android.com/reference/android/graphics/BitmapFactory.Options#inPremultiplied),
384 | // so we use the premultiplied alpha blend factors.
385 | GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA)
386 | }
387 | }
388 | }
389 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId)
390 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, indexCount, GLES20.GL_UNSIGNED_SHORT, 0)
391 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0)
392 | if (blendMode != null) {
393 | GLES20.glDisable(GLES20.GL_BLEND)
394 | GLES20.glDepthMask(true)
395 | }
396 |
397 | // Disable vertex arrays
398 | GLES20.glDisableVertexAttribArray(positionAttribute)
399 | GLES20.glDisableVertexAttribArray(normalAttribute)
400 | GLES20.glDisableVertexAttribArray(texCoordAttribute)
401 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
402 | ShaderUtil.checkGLError(TAG, "After draw")
403 | }
404 |
405 | fun setUvTransformMatrix(transform: FloatArray?) {
406 | uvTransform = transform
407 | }
408 |
409 | fun setDepthTexture(textureId: Int, width: Int, height: Int) {
410 | depthTextureId = textureId
411 | depthAspectRatio = width.toFloat() / height.toFloat()
412 | }
413 |
414 | companion object {
415 | private val TAG = ObjectRenderer::class.java.simpleName
416 |
417 | // Shader names.
418 | private const val VERTEX_SHADER_NAME = "shaders/ar_object.vert"
419 | private const val FRAGMENT_SHADER_NAME = "shaders/ar_object.frag"
420 | private const val COORDS_PER_VERTEX = 3
421 | private val DEFAULT_COLOR = floatArrayOf(0f, 0f, 0f, 0f)
422 |
423 | // Note: the last component must be zero to avoid applying the translational part of the matrix.
424 | private val LIGHT_DIRECTION = floatArrayOf(0.250f, 0.866f, 0.433f, 0.0f)
425 |
426 | // Depth-for-Occlusion parameters.
427 | private const val USE_DEPTH_FOR_OCCLUSION_SHADER_FLAG = "USE_DEPTH_FOR_OCCLUSION"
428 | private fun normalizeVec3(v: FloatArray) {
429 | val reciprocalLength = 1.0f / Math.sqrt((v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).toDouble()).toFloat()
430 | v[0] *= reciprocalLength
431 | v[1] *= reciprocalLength
432 | v[2] *= reciprocalLength
433 | }
434 | }
435 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/kotlin/hellorecordingplayback/HelloRecordingPlaybackActivity.kt:
--------------------------------------------------------------------------------
1 | package com.google.ar.core.examples.kotlin.hellorecordingplayback
2 |
3 | import android.Manifest.permission
4 | import android.content.pm.PackageManager
5 | import android.opengl.GLES20
6 | import android.opengl.GLSurfaceView
7 | import android.os.Bundle
8 | import android.util.Log
9 | import android.view.View
10 | import android.widget.TextView
11 | import androidx.appcompat.app.AppCompatActivity
12 | import androidx.core.app.ActivityCompat
13 | import androidx.core.content.ContextCompat
14 | import androidx.databinding.DataBindingUtil
15 | import com.google.ar.core.*
16 | import com.google.ar.core.ArCoreApk.InstallStatus
17 | import com.google.ar.core.examples.kotlin.common.helpers.DisplayRotationHelper
18 | import com.google.ar.core.examples.kotlin.common.helpers.FullScreenHelper.setFullScreenOnWindowFocusChanged
19 | import com.google.ar.core.examples.kotlin.common.helpers.SnackbarHelper
20 | import com.google.ar.core.examples.kotlin.common.helpers.TapHelper
21 | import com.google.ar.core.examples.kotlin.common.helpers.TrackingStateHelper
22 | import com.google.ar.core.examples.kotlin.common.rendering.BackgroundRenderer
23 | import com.google.ar.core.examples.kotlin.common.rendering.ObjectRenderer
24 | import com.google.ar.core.examples.kotlin.common.rendering.PlaneRenderer
25 | import com.google.ar.core.examples.kotlin.common.rendering.PointCloudRenderer
26 | import com.google.ar.core.examples.kotlin.hellorecordingplayback.databinding.ActivityMainBinding
27 | import com.google.ar.core.exceptions.*
28 | import org.joda.time.DateTime
29 | import java.io.File
30 | import java.io.IOException
31 | import java.nio.ByteBuffer
32 | import java.util.*
33 | import java.util.concurrent.atomic.AtomicReference
34 | import javax.microedition.khronos.egl.EGLConfig
35 | import javax.microedition.khronos.opengles.GL10
36 |
37 | /**
38 | * This is a simple example that shows how to create an augmented reality (AR) app that demonstrates
39 | * recording and playback of the AR session:
40 | *
41 | *
 * - During recording, ARCore captures the device camera and IMU sensor data to an MP4 video file.
 * - During playback, ARCore replays the recorded session.
 * - The app visualizes detected planes.
 * - The user can tap on a detected plane to place a 3D model. These taps are simultaneously
 *   recorded in a separate MP4 data track, so that the taps can be replayed during playback.
47 | *
48 | */
49 | class HelloRecordingPlaybackActivity : AppCompatActivity(), GLSurfaceView.Renderer {
  // Application states. The current state drives which UI controls are shown and how the
  // ARCore session is configured on resume.
  private enum class AppState {
    IDLE, RECORDING, PLAYBACK // idle: live AR; recording: capturing to MP4; playback: replaying MP4.
  }
54 |
  // The app state so that it can be preserved when the activity restarts. This is also used to
  // update the UI.
  private val currentState = AtomicReference(AppState.IDLE)
  // MP4 dataset paths: the one to replay, and the most recently finished recording.
  private var playbackDatasetPath: String? = null
  private var lastRecordingDatasetPath: String? = null
  // ARCore session; null until created in onResume() (after install/permission checks).
  private var session: Session? = null
  private val messageSnackbarHelper = SnackbarHelper()
  private var displayRotationHelper: DisplayRotationHelper? = null
  private val trackingStateHelper = TrackingStateHelper(this)
  // Queues touch events from the GL surface; polled once per frame in handleTap().
  private var tapHelper: TapHelper? = null
  private var surfaceView: GLSurfaceView? = null
  // Data binding for activity_main; assigned in onCreate() before any use.
  lateinit var binding: ActivityMainBinding

  // The Renderers are created here, and initialized when the GL surface is created.
  private val backgroundRenderer = BackgroundRenderer()
  private val virtualObject = ObjectRenderer()
  private val virtualObjectShadow = ObjectRenderer()
  private val planeRenderer = PlaneRenderer()
  private val pointCloudRenderer = PointCloudRenderer()

  // Temporary matrix allocated here to reduce number of allocations for each frame.
  private val anchorMatrix = FloatArray(16)
77 |
78 | // Anchors created from taps used for object placing with a given color.
79 | private class ColoredAnchor(val anchor: Anchor, val color: FloatArray)
80 |
81 | private val anchors = ArrayList()
82 | private val anchorsToBeRecorded = ArrayList()
83 | private var installRequested = false
84 | override fun onCreate(savedInstanceState: Bundle?) {
85 | super.onCreate(savedInstanceState)
86 | loadInternalStateFromIntentExtras()
87 | binding = DataBindingUtil.setContentView(this, R.layout.activity_main)
88 |
89 | surfaceView = findViewById(R.id.surfaceview)
90 | displayRotationHelper = DisplayRotationHelper( this)
91 |
92 | // Set up touch listener.
93 | tapHelper = TapHelper( this)
94 | setupSurfaceView()
95 | installRequested = false
96 |
97 |
98 | setupClickListeners()
99 | updateUI()
100 | }
101 |
102 | private fun setupClickListeners() {
103 | binding.startRecordingButton.setOnClickListener { startRecording() }
104 | binding.stopRecordingButton.setOnClickListener { stopRecording() }
105 | binding.playbackButton.setOnClickListener { startPlayback() }
106 | binding.closePlaybackButton.setOnClickListener { stopPlayback() }
107 | }
108 |
109 | private fun setupSurfaceView() {
110 | surfaceView!!.setOnTouchListener(tapHelper)
111 |
112 | // Set up renderer.
113 | binding.surfaceview.preserveEGLContextOnPause = true
114 | binding.surfaceview.setEGLContextClientVersion(2)
115 | binding.surfaceview.setEGLConfigChooser(8, 8, 8, 8, 16, 0) // Alpha used for plane blending.
116 | binding.surfaceview.setRenderer(this)
117 | binding.surfaceview.renderMode = GLSurfaceView.RENDERMODE_CONTINUOUSLY
118 | binding.surfaceview.setWillNotDraw(false)
119 | }
120 |
  /**
   * Ensures ARCore is installed and the camera permission is granted, creates the [Session] on
   * first resume, then resumes the session, GL surface, and display-rotation tracking. The resume
   * order here is the reverse of the pause order in [onPause].
   */
  override fun onResume() {
    super.onResume()
    if (session == null) {
      var exception: Exception? = null
      var message: String? = null
      try {
        // May return before session creation: once to let the ARCore install flow run, and
        // once more if runtime permissions still need to be requested.
        when (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
          InstallStatus.INSTALL_REQUESTED -> {
            // Install flow started; this activity will be resumed again afterwards.
            installRequested = true
            return
          }
          InstallStatus.INSTALLED -> {
          }
          else ->{

          }
        }

        // If we did not yet obtain runtime permission on Android M and above, now is a good time to
        // ask the user for it.
        if (requestPermissions()) {
          return
        }

        // Create the session.
        session = Session( /* context= */this)
        if (currentState.get() == AppState.PLAYBACK) {
          // Dataset playback will start when session.resume() is called.
          setPlaybackDatasetPath()
        }
      } catch (e: UnavailableArcoreNotInstalledException) {
        message = "Please install Google Play Services for AR (ARCore)"
        exception = e
      } catch (e: UnavailableUserDeclinedInstallationException) {
        message = "Please install Google Play Services for AR (ARCore)"
        exception = e
      } catch (e: UnavailableApkTooOldException) {
        message = "Please update Google Play Services for AR (ARCore)"
        exception = e
      } catch (e: UnavailableSdkTooOldException) {
        message = "Please update this app"
        exception = e
      } catch (e: UnavailableDeviceNotCompatibleException) {
        message = "This device does not support AR"
        exception = e
      } catch (e: Exception) {
        message = "Failed to create AR session"
        exception = e
      }
      if (message != null) {
        // Session creation failed: surface the error and leave `session` null so onDrawFrame
        // renders nothing.
        messageSnackbarHelper.showError(this, "$message $exception")
        Log.e(TAG, "Exception creating session", exception)
        return
      }
    }

    // Note that order matters - see the note in onPause(), the reverse applies here.
    try {
      // Playback will now start if an MP4 dataset has been set.
      session!!.resume()
    } catch (e: CameraNotAvailableException) {
      messageSnackbarHelper.showError(this, "Camera not available. Try restarting the app.")
      session = null
      return
    }
    if (currentState.get() == AppState.PLAYBACK) {
      // Must be called after dataset playback is started by call to session.resume().
      checkPlaybackStatus()
    }
    surfaceView!!.onResume()
    displayRotationHelper!!.onResume()
    updateUI()
  }
194 |
195 | public override fun onPause() {
196 | super.onPause()
197 | if (session != null) {
198 | // Note that the order matters - GLSurfaceView is paused first so that it does not try
199 | // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
200 | // still call session.update() and get a SessionPausedException.
201 | displayRotationHelper!!.onPause()
202 | binding.surfaceview.onPause()
203 | if (currentState.get() == AppState.RECORDING) {
204 | stopRecording()
205 | }
206 | session!!.pause()
207 | }
208 | }
209 |
210 | override fun onRequestPermissionsResult(requestCode: Int, permissions: Array, results: IntArray) {
211 | super.onRequestPermissionsResult(requestCode, permissions, results)
212 | if (requestCode == PERMISSIONS_REQUEST_CODE) {
213 | for (i in results.indices) {
214 | if (results[i] != PackageManager.PERMISSION_GRANTED) {
215 | logAndShowErrorMessage("Cannot start app, missing permission: " + permissions[i])
216 | finish()
217 | }
218 | }
219 | }
220 | }
221 |
222 | override fun onWindowFocusChanged(hasFocus: Boolean) {
223 | super.onWindowFocusChanged(hasFocus)
224 | setFullScreenOnWindowFocusChanged(this, hasFocus)
225 | }
226 |
227 | override fun onSurfaceCreated(gl: GL10, config: EGLConfig) {
228 | GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f)
229 |
230 | // Prepare the rendering objects. This involves reading shaders, so may throw an IOException.
231 | try {
232 | // Create the texture and pass it to ARCore session to be filled during update().
233 | backgroundRenderer.createOnGlThread( this)
234 | planeRenderer.createOnGlThread( this, "models/trigrid.png")
235 | pointCloudRenderer.createOnGlThread( this)
236 | virtualObject.createOnGlThread( this, "models/andy.obj", "models/andy.png")
237 | virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f)
238 | virtualObjectShadow.createOnGlThread(
239 | this, "models/andy_shadow.obj", "models/andy_shadow.png")
240 | virtualObjectShadow.setBlendMode(ObjectRenderer.BlendMode.Shadow)
241 | virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f)
242 | } catch (e: IOException) {
243 | Log.e(TAG, "Failed to read an asset file", e)
244 | }
245 | }
246 |
247 | override fun onSurfaceChanged(gl: GL10, width: Int, height: Int) {
248 | displayRotationHelper!!.onSurfaceChanged(width, height)
249 | GLES20.glViewport(0, 0, width, height)
250 | }
251 |
  /**
   * Per-frame GL callback: updates the ARCore session, handles taps, records/replays anchors,
   * then renders the camera background, point cloud, planes, and anchored models. All exceptions
   * are caught so a render-time failure cannot crash the GL thread.
   */
  override fun onDrawFrame(gl: GL10) {
    // Clear screen to tell driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)

    // Do not render anything or call session methods until session is created.
    if (session == null) {
      return
    }

    // Notify ARCore session that the view size changed so that the projection matrix and
    // the video background can be properly adjusted.
    displayRotationHelper!!.updateSessionIfNeeded(session!!)
    try {
      session!!.setCameraTextureName(backgroundRenderer.textureId)

      // Obtain the current frame from ARSession. When the configuration is set to
      // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
      // camera framerate.
      val frame = session!!.update()
      val camera = frame.camera

      // Handle one tap per frame.
      val anchor = handleTap(frame, camera)
      if (anchor != null) {
        // If we created an anchor, then try to record it.
        anchorsToBeRecorded.add(anchor)
      }

      // Try to record any anchors that have not been recorded yet.
      recordAnchors(session!!, frame, camera)

      // If we are playing back, then add any recorded anchors to the session.
      addRecordedAnchors(session!!, frame, camera)

      // If frame is ready, render camera preview image to the GL surface.
      backgroundRenderer.draw(frame)

      // Keep the screen unlocked while tracking, but allow it to lock when tracking stops.
      trackingStateHelper.updateKeepScreenOnFlag(camera.trackingState)

      // If not tracking, don't draw 3D objects, show tracking failure reason instead.
      if (camera.trackingState == TrackingState.PAUSED) {
        messageSnackbarHelper.showMessage(
            this, TrackingStateHelper.getTrackingFailureReasonString(camera))
        return
      }

      // Get projection matrix (near/far clip planes: 0.1 to 100).
      val projmtx = FloatArray(16)
      camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f)

      // Get camera matrix and draw.
      val viewmtx = FloatArray(16)
      camera.getViewMatrix(viewmtx, 0)

      // Compute lighting from average intensity of the image.
      // The first three components are color scaling factors.
      // The last one is the average pixel intensity in gamma space.
      val colorCorrectionRgba = FloatArray(4)
      frame.lightEstimate.getColorCorrection(colorCorrectionRgba, 0)
      // `use` releases the point cloud even if drawing throws.
      frame.acquirePointCloud().use { pointCloud ->
        pointCloudRenderer.update(pointCloud)
        pointCloudRenderer.draw(viewmtx, projmtx)
      }

      // No tracking failure at this point. If we detected any planes, then hide the
      // message UI. If not planes detected, show searching planes message.
      if (hasTrackingPlane()) {
        messageSnackbarHelper.hide(this)
      } else {
        messageSnackbarHelper.showMessage(this, SEARCHING_PLANE_MESSAGE)
      }

      // Visualize detected planes.
      planeRenderer.drawPlanes(
          session!!.getAllTrackables(Plane::class.java), camera.displayOrientedPose, projmtx)

      // Visualize anchors created by tapping.
      val scaleFactor = 1.0f
      for (coloredAnchor in anchors) {
        if (coloredAnchor.anchor.trackingState != TrackingState.TRACKING) {
          continue
        }
        // Get the current pose of an Anchor in world space. The Anchor pose is updated
        // during calls to session.update() as ARCore refines its estimate of the world.
        coloredAnchor.anchor.pose.toMatrix(anchorMatrix, 0)

        // Update and draw the model and its shadow.
        virtualObject.updateModelMatrix(anchorMatrix, scaleFactor)
        virtualObjectShadow.updateModelMatrix(anchorMatrix, scaleFactor)
        virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba, coloredAnchor.color)
        virtualObjectShadow.draw(viewmtx, projmtx, colorCorrectionRgba, coloredAnchor.color)
      }
    } catch (t: Throwable) {
      // Avoid crashing the application due to unhandled exceptions.
      Log.e(TAG, "Exception on the OpenGL thread", t)
    }
  }
350 |
/**
 * Tries to create an anchor if the user has tapped the screen.
 *
 * Handles only one tap per frame, as taps are usually low frequency compared to frame rate.
 *
 * @return the newly created [ColoredAnchor], or null if no tap occurred, tracking is lost,
 * or no valid trackable was hit.
 */
private fun handleTap(frame: Frame, camera: Camera): ColoredAnchor? {
    val tap = tapHelper!!.poll() ?: return null
    if (camera.trackingState != TrackingState.TRACKING) {
        return null
    }
    // Hits are sorted by depth, so the first valid hit is the closest one.
    for (hit in frame.hitTest(tap)) {
        val trackable = hit.trackable
    // A plane hit counts only when it is inside the plane polygon and in front of the camera;
    // a point hit counts only when its orientation was estimated from a surface normal.
        val hitInsidePlane = trackable is Plane
            && trackable.isPoseInPolygon(hit.hitPose)
            && PlaneRenderer.calculateDistanceToPlane(hit.hitPose, camera.pose) > 0
        val hitOrientedPoint = trackable is Point
            && trackable.orientationMode == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL
        if (hitInsidePlane || hitOrientedPoint) {
            // Cap the number of objects created. This avoids overloading both the
            // rendering system and ARCore.
            if (anchors.size >= 20) {
                anchors[0].anchor.detach()
                anchors.removeAt(0)
            }
            // Color the rendered object based on the type of trackable the anchor attaches to.
            val objColor = when (trackable) {
                is Point -> floatArrayOf(66.0f, 133.0f, 244.0f, 255.0f) // Blue.
                is Plane -> floatArrayOf(139.0f, 195.0f, 74.0f, 255.0f) // Green.
                else -> DEFAULT_COLOR
            }
            // Adding an Anchor tells ARCore that it should track this position in
            // space. This anchor is created on the Plane to place the 3D model
            // in the correct position relative both to the world and to the plane.
            val anchor = ColoredAnchor(hit.createAnchor(), objColor)
            anchors.add(anchor)
            return anchor
        }
    }
    return null
}
395 |
/**
 * Tries to add anchors to an MP4 data track if the app is currently recording.
 *
 * Track data recording can sometimes fail because an image is not available for recording in
 * ARCore, so every anchor that has not yet been recorded is retried on each frame.
 */
private fun recordAnchors(session: Session, frame: Frame, camera: Camera) {
    if (session.recordingStatus != RecordingStatus.OK) {
        // We do not record anchors created before we started recording.
        anchorsToBeRecorded.clear()
        return
    }
    val pending = anchorsToBeRecorded.iterator()
    while (pending.hasNext()) {
        val coloredAnchor = pending.next()
        // Express the anchor pose in the camera coordinate frame for easy placement
        // during playback.
        val relativePose = camera.pose.inverse().compose(coloredAnchor.anchor.pose)
        val translation = relativePose.translation
        val rotation = relativePose.rotationQuaternion
        // Payload layout: translation floats, then quaternion floats, then RGBA color floats.
        val payload =
            ByteBuffer.allocate(4 * (translation.size + rotation.size + coloredAnchor.color.size))
        payload.asFloatBuffer().apply {
            put(translation)
            put(rotation)
            put(coloredAnchor.color)
        }
        try {
            frame.recordTrackData(ANCHOR_TRACK_ID, payload)
            pending.remove()
        } catch (e: IllegalStateException) {
            // Keep the anchor queued and retry on a later frame.
            Log.e(TAG, "Could not record anchor into external data track.", e)
            return
        }
    }
}
431 |
/** During playback, recreates any anchors that were placed during recording. */
private fun addRecordedAnchors(session: Session, frame: Frame, camera: Camera) {
    for (trackData in frame.getUpdatedTrackData(ANCHOR_TRACK_ID)) {
        // Payload layout: translation floats, then quaternion floats, then RGBA color floats.
        val floats = trackData.data.asFloatBuffer()
        val translation = FloatArray(3).also { floats.get(it) }
        val quaternion = FloatArray(4).also { floats.get(it) }
        val color = FloatArray(4).also { floats.get(it) }
        // The pose was recorded in the camera coordinate frame; compose with the current
        // camera pose to transform it back into world coordinates.
        val worldPose = camera.pose.compose(Pose(translation, quaternion))
        anchors.add(ColoredAnchor(session.createAnchor(worldPose), color))
    }
}
451 |
/** Checks if we detected at least one plane that is currently being tracked. */
private fun hasTrackingPlane(): Boolean =
    session!!.getAllTrackables(Plane::class.java)
        .any { it.trackingState == TrackingState.TRACKING }
461 |
/**
 * Requests any not (yet) granted required permissions needed for recording and playback.
 *
 * @return false if all permissions are already granted; otherwise requests the missing
 * permissions and returns true.
 */
private fun requestPermissions(): Boolean {
    // Note: a raw `MutableList` without a type argument does not compile in Kotlin;
    // the filtered List<String> also avoids manual accumulation.
    val permissionsNotGranted = requiredPermissions.filter { permission ->
        ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED
    }
    if (permissionsNotGranted.isEmpty()) {
        return false
    }
    ActivityCompat.requestPermissions(
        this, permissionsNotGranted.toTypedArray(), PERMISSIONS_REQUEST_CODE)
    return true
}
484 |
/** Sets the path of the MP4 dataset to playback. */
private fun setPlaybackDatasetPath() {
    if (session!!.playbackStatus == PlaybackStatus.OK) {
        logAndShowErrorMessage("Session is already playing back.")
        setStateAndUpdateUI(AppState.PLAYBACK)
        return
    }
    // Nothing to play back until a dataset has been recorded or selected.
    val datasetPath = playbackDatasetPath ?: return
    try {
        session!!.setPlaybackDataset(datasetPath)
    } catch (e: PlaybackFailedException) {
        val errorMsg = "Failed to set playback MP4 dataset. $e"
        Log.e(TAG, errorMsg, e)
        messageSnackbarHelper.showError(this, errorMsg)
        Log.d(TAG, "Setting app state to IDLE, as the playback is not in progress.")
        setStateAndUpdateUI(AppState.IDLE)
        return
    }
    setStateAndUpdateUI(AppState.PLAYBACK)
}
506 |
/**
 * Generates a new MP4 dataset path based on the current system time, or null when the
 * external files directory is unavailable.
 */
private val newDatasetPath: String?
    get() {
        // Reuse the first lookup; the original called getExternalFilesDir(null) twice and
        // left `baseDir` unused. `private get()` on a private property is also redundant.
        val baseDir = getExternalFilesDir(null) ?: return null
        return File(baseDir, newMp4DatasetFilename).absolutePath
    }
513 |
/** Updates UI behaviors based on current app state. */
private fun updateUI() {
    with(binding) {
        when (currentState.get()) {
            AppState.IDLE -> {
                startRecordingButton.visibility = View.VISIBLE
                startRecordingButton.isEnabled = true
                stopRecordingButton.visibility = View.INVISIBLE
                stopRecordingButton.isEnabled = false
                closePlaybackButton.visibility = View.INVISIBLE
                closePlaybackButton.isEnabled = false
                // Playback is only possible once a dataset path exists.
                playbackButton.isEnabled = playbackDatasetPath != null
                recordingPlaybackPathTextView!!.text = resources.getString(
                    R.string.playback_path_text,
                    if (playbackDatasetPath == null) "" else playbackDatasetPath)
            }
            AppState.RECORDING -> {
                startRecordingButton.visibility = View.INVISIBLE
                startRecordingButton.isEnabled = false
                stopRecordingButton.visibility = View.VISIBLE
                stopRecordingButton.isEnabled = true
                closePlaybackButton.visibility = View.INVISIBLE
                closePlaybackButton.isEnabled = false
                playbackButton.isEnabled = false
                recordingPlaybackPathTextView!!.text = resources.getString(
                    R.string.recording_path_text,
                    if (lastRecordingDatasetPath == null) "" else lastRecordingDatasetPath)
            }
            AppState.PLAYBACK -> {
                recordingPlaybackPathTextView!!.text = ""
                startRecordingButton.visibility = View.INVISIBLE
                startRecordingButton.isEnabled = false
                stopRecordingButton.visibility = View.INVISIBLE
                stopRecordingButton.isEnabled = false
                playbackButton.visibility = View.INVISIBLE
                closePlaybackButton.isEnabled = true
                closePlaybackButton.visibility = View.VISIBLE
                playbackButton.isEnabled = false
            }
        }
    }
}
558 |
/** Performs action when start_recording button is clicked. */
private fun startRecording() {
    // Generating the path cannot throw RecordingFailedException, so it lives outside the try.
    lastRecordingDatasetPath = newDatasetPath
    if (lastRecordingDatasetPath == null) {
        logAndShowErrorMessage("Failed to generate a MP4 dataset path for recording.")
        return
    }
    try {
        // Register a custom data track so anchors can be recorded alongside the video.
        val anchorTrack = Track(session)
            .setId(ANCHOR_TRACK_ID)
            .setMimeType(ANCHOR_TRACK_MIME_TYPE)
        val recordingConfig = RecordingConfig(session)
            .setMp4DatasetFilePath(lastRecordingDatasetPath)
            .setAutoStopOnPause(false)
            .addTrack(anchorTrack)
        session!!.startRecording(recordingConfig)
    } catch (e: RecordingFailedException) {
        val errorMessage = "Failed to start recording. $e"
        Log.e(TAG, errorMessage, e)
        messageSnackbarHelper.showError(this, errorMessage)
        return
    }
    if (session!!.recordingStatus != RecordingStatus.OK) {
        logAndShowErrorMessage(
            "Failed to start recording, recording status is " + session!!.recordingStatus)
        return
    }
    setStateAndUpdateUI(AppState.RECORDING)
}
586 |
/** Performs action when stop_recording button is clicked. */
private fun stopRecording() {
    try {
        session!!.stopRecording()
    } catch (e: RecordingFailedException) {
        val errorMessage = "Failed to stop recording. $e"
        Log.e(TAG, errorMessage, e)
        messageSnackbarHelper.showError(this, errorMessage)
        return
    }
    if (session!!.recordingStatus == RecordingStatus.OK) {
        logAndShowErrorMessage(
            "Failed to stop recording, recording status is " + session!!.recordingStatus)
        return
    }
    // Snapshot the path into a local so the null check smart-casts it; the original passed
    // the nullable field straight to File(), risking an NPE.
    val recordedPath = lastRecordingDatasetPath
    if (recordedPath != null && File(recordedPath).exists()) {
        playbackDatasetPath = recordedPath
        Log.d(TAG, "MP4 dataset has been saved at: $playbackDatasetPath")
    } else {
        logAndShowErrorMessage(
            "Recording failed. File $lastRecordingDatasetPath wasn't created.")
    }
    setStateAndUpdateUI(AppState.IDLE)
}
611 |
/**
 * Helper function to log an error message and show the same text on screen via the snackbar.
 *
 * @param errorMessage the message written to Logcat and displayed to the user.
 */
private fun logAndShowErrorMessage(errorMessage: String) {
    Log.e(TAG, errorMessage)
    messageSnackbarHelper.showError(this, errorMessage)
}
617 |
/**
 * Helper function to set the app state and immediately refresh the UI to match it.
 *
 * @param state the new [AppState] stored into [currentState].
 */
private fun setStateAndUpdateUI(state: AppState) {
    currentState.set(state)
    updateUI()
}
623 |
/** Performs action when playback button is clicked. */
private fun startPlayback() {
    // Playback requires a previously recorded or selected dataset.
    if (playbackDatasetPath == null) return
    currentState.set(AppState.PLAYBACK)
    restartActivityWithIntentExtras()
}
632 |
/**
 * Performs action when close_playback button is clicked: returns the app to the IDLE
 * state by restarting the activity without playback intent extras.
 */
private fun stopPlayback() {
    currentState.set(AppState.IDLE)
    restartActivityWithIntentExtras()
}
638 |
/** Checks the playback is in progress without issues. */
private fun checkPlaybackStatus() {
    // Read the status once: the original queried session!!.playbackStatus three times, so the
    // logged value could differ from the one that failed the check.
    val playbackStatus = session!!.playbackStatus
    if (playbackStatus != PlaybackStatus.OK && playbackStatus != PlaybackStatus.FINISHED) {
        logAndShowErrorMessage(
            "Failed to start playback, playback status is: " + playbackStatus)
        setStateAndUpdateUI(AppState.IDLE)
    }
}
648 |
/**
 * Restarts current activity to enter or exit playback mode.
 *
 * This method simulates an app with separate activities for recording and playback by
 * restarting the current activity and passing in the desired app state via an intent with extras.
 */
private fun restartActivityWithIntentExtras() {
    // Carry the desired state and dataset path across the restart.
    val restartIntent = intent.putExtras(
        Bundle().apply {
            putString(DESIRED_APP_STATE_KEY, currentState.get().name)
            putString(DESIRED_DATASET_PATH_KEY, playbackDatasetPath)
        })
    finish()
    startActivity(restartIntent)
}
665 |
/** Loads desired state from intent extras, if available. */
private fun loadInternalStateFromIntentExtras() {
    // Elvis early-return replaces the null checks plus the avoidable `bundle!!`.
    val bundle = intent?.extras ?: return
    if (bundle.containsKey(DESIRED_DATASET_PATH_KEY)) {
        // Read from the bundle itself; the original inconsistently checked the bundle but then
        // read via intent.getStringExtra.
        playbackDatasetPath = bundle.getString(DESIRED_DATASET_PATH_KEY)
    }
    // Missing or unknown state names leave currentState untouched, matching the original.
    when (bundle.getString(DESIRED_APP_STATE_KEY)) {
        "PLAYBACK" -> currentState.set(AppState.PLAYBACK)
        "IDLE" -> currentState.set(AppState.IDLE)
        "RECORDING" -> currentState.set(AppState.RECORDING)
        else -> {}
    }
}
688 |
companion object {
    private val TAG: String = HelloRecordingPlaybackActivity::class.java.simpleName

    // MP4 dataset naming convention: arcore-dataset-YYYY-MM-DD-hh-mm-ss.mp4
    private const val MP4_DATASET_FILENAME_TEMPLATE = "arcore-dataset-%s.mp4"
    private const val MP4_DATASET_TIMESTAMP_FORMAT = "yyyy-MM-dd-HH-mm-ss"

    // Keys to keep track of the active dataset and playback state between restarts.
    private const val DESIRED_DATASET_PATH_KEY = "desired_dataset_path_key"
    private const val DESIRED_APP_STATE_KEY = "desired_app_state_key"
    private const val PERMISSIONS_REQUEST_CODE = 0

    // Recording and playback requires android.permission.WRITE_EXTERNAL_STORAGE and
    // android.permission.CAMERA to operate. These permissions must be mirrored in the manifest.
    // listOf is the idiomatic Kotlin replacement for Arrays.asList.
    private val requiredPermissions = listOf(permission.CAMERA, permission.WRITE_EXTERNAL_STORAGE)

    // Randomly generated UUID and custom MIME type to mark the anchor track for this sample.
    private val ANCHOR_TRACK_ID: UUID = UUID.fromString("a65e59fc-2e13-4607-b514-35302121c138")
    private const val ANCHOR_TRACK_MIME_TYPE = "application/hello-recording-playback-anchor"
    private val DEFAULT_COLOR = floatArrayOf(0f, 0f, 0f, 0f)
    private const val SEARCHING_PLANE_MESSAGE = "Searching for surfaces..."

    /** Generates a new MP4 dataset filename based on the current system time. */
    // `private get()` was redundant on an already-private property.
    private val newMp4DatasetFilename: String
        get() = String.format(
            Locale.ENGLISH,
            MP4_DATASET_FILENAME_TEMPLATE,
            DateTime.now().toString(MP4_DATASET_TIMESTAMP_FORMAT))
}
718 | }
--------------------------------------------------------------------------------