├── .gitignore ├── LICENSE ├── README.md ├── app ├── .gitignore ├── build.gradle ├── proguard-rules.pro └── src │ └── main │ ├── AndroidManifest.xml │ ├── assets │ ├── models │ │ ├── anchor.obj │ │ ├── anchor.png │ │ ├── andy.obj │ │ ├── andy.png │ │ ├── andy_shadow.obj │ │ ├── andy_shadow.png │ │ ├── andy_spec.png │ │ ├── ear_fur.png │ │ ├── forehead_left.obj │ │ ├── forehead_right.obj │ │ ├── freckles.png │ │ ├── map_quality_bar.obj │ │ ├── map_quality_bar.png │ │ ├── nose.obj │ │ ├── nose_fur.png │ │ └── trigrid.png │ └── shaders │ │ ├── ar_object.frag │ │ ├── ar_object.vert │ │ ├── background_show_depth_color_visualization.frag │ │ ├── background_show_depth_color_visualization.vert │ │ ├── object.frag │ │ ├── object.vert │ │ ├── plane.frag │ │ ├── plane.vert │ │ ├── point_cloud.frag │ │ ├── point_cloud.vert │ │ ├── screenquad.frag │ │ └── screenquad.vert │ ├── java │ └── com │ │ └── google │ │ └── ar │ │ └── core │ │ └── examples │ │ └── java │ │ ├── augmentedfaces │ │ ├── AugmentedFaceRenderer.java │ │ ├── AugmentedFacesActivity.java │ │ ├── MessageSender.java │ │ ├── ServerMessage.java │ │ ├── TcpClientHandler.java │ │ └── TcpServerService.java │ │ └── common │ │ ├── helpers │ │ ├── CameraPermissionHelper.java │ │ ├── DepthSettings.java │ │ ├── DisplayRotationHelper.java │ │ ├── FullScreenHelper.java │ │ ├── InstantPlacementSettings.java │ │ ├── SnackbarHelper.java │ │ ├── TapHelper.java │ │ └── TrackingStateHelper.java │ │ └── rendering │ │ ├── BackgroundRenderer.java │ │ ├── ObjectRenderer.java │ │ ├── PlaneRenderer.java │ │ ├── PointCloudRenderer.java │ │ └── ShaderUtil.java │ └── res │ ├── drawable-xxhdpi │ └── ic_launcher.png │ ├── layout │ └── activity_main.xml │ └── values │ ├── strings.xml │ └── styles.xml ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Android Studio configuration. 2 | *.iml 3 | .idea/ 4 | 5 | # Gradle configuration. 6 | .gradle/ 7 | build/ 8 | 9 | # User configuration. 10 | local.properties 11 | 12 | # OS configurations. 13 | .DS_Store 14 | 15 | # Android NDK cmake output. 16 | .cxx/ 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Maxime Dupart 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AndroidArcoreFacesStreaming 2 | 3 | From any Android phone ArCore compatible, using this app will send over TCP 5680 bytes messages: 4 | The first 5616 bytes is a vertex buffer of 468 points mapping the user face in local head space (468 x 3 floats). 5 | Following 64 bytes are the coefficient of a 4x4 Matrix (16floats) representing the transform of the Head bone in world space. 6 | 7 | Built with Android Studio 4.2.1 8 | 9 | Thread: 10 | https://forums.unrealengine.com/t/face-capture-with-android-metahuman-download-links-free-open-source-demo/234927 11 | 12 | ![alt text](https://i.imgur.com/T9EV1fr.png) 13 | 14 | ![alt text](https://i.imgur.com/uC77IqQl.jpg) 15 | 16 | -------------------------------------------------------------------------------- /app/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | google-services.json 3 | -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdkVersion 29 5 | defaultConfig { 6 | applicationId "com.google.ar.core.examples.java.augmentedfaces" 7 | 8 | // AR Optional apps must declare minSdkVersion >= 14. 9 | // AR Required apps must declare minSdkVersion >= 24. 
10 | minSdkVersion 24 11 | targetSdkVersion 29 12 | versionCode 1 13 | versionName '1.0' 14 | } 15 | compileOptions { 16 | sourceCompatibility JavaVersion.VERSION_1_8 17 | targetCompatibility JavaVersion.VERSION_1_8 18 | } 19 | buildTypes { 20 | release { 21 | minifyEnabled false 22 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 23 | } 24 | } 25 | } 26 | 27 | dependencies { 28 | // ARCore (Google Play Services for AR) library. 29 | implementation 'com.google.ar:core:1.24.0' 30 | 31 | // Obj - a simple Wavefront OBJ file loader 32 | // https://github.com/javagl/Obj 33 | implementation 'de.javagl:obj:0.2.1' 34 | 35 | implementation 'androidx.appcompat:appcompat:1.1.0' 36 | implementation 'com.google.android.material:material:1.1.0' 37 | } 38 | -------------------------------------------------------------------------------- /app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # By default, the flags in this file are appended to flags specified 3 | # in /opt/android-sdk/tools/proguard/proguard-android.txt 4 | # You can edit the include path and order by changing the proguardFiles 5 | # directive in build.gradle. 
6 | # 7 | # For more details, see 8 | # http://developer.android.com/guide/developing/tools/proguard.html 9 | 10 | # Add any project specific keep options here: 11 | 12 | # If your project uses WebView with JS, uncomment the following 13 | # and specify the fully qualified class name to the JavaScript interface 14 | # class: 15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 16 | # public *; 17 | #} 18 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 20 | 21 | 22 | 23 | 24 | 26 | 27 | 28 | 29 | 36 | 37 | 43 | 44 | 45 | 46 | 47 | 48 | 52 | 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /app/src/main/assets/models/anchor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/anchor.png -------------------------------------------------------------------------------- /app/src/main/assets/models/andy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/andy.png -------------------------------------------------------------------------------- /app/src/main/assets/models/andy_shadow.obj: -------------------------------------------------------------------------------- 1 | # This file uses centimeters as units for non-parametric coordinates. 
2 | 3 | g default 4 | v -0.100000 -0.000000 0.100000 5 | v 0.100000 -0.000000 0.100000 6 | v -0.100000 0.000000 -0.100000 7 | v 0.100000 0.000000 -0.100000 8 | vt 0.000000 0.000000 9 | vt 1.000000 0.000000 10 | vt 0.000000 1.000000 11 | vt 1.000000 1.000000 12 | vn 0.000000 1.000000 0.000000 13 | vn 0.000000 1.000000 0.000000 14 | vn 0.000000 1.000000 0.000000 15 | vn 0.000000 1.000000 0.000000 16 | s off 17 | g AndyBlobShadow_GEO 18 | f 4/4/1 3/3/2 1/1/3 2/2/4 19 | -------------------------------------------------------------------------------- /app/src/main/assets/models/andy_shadow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/andy_shadow.png -------------------------------------------------------------------------------- /app/src/main/assets/models/andy_spec.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/andy_spec.png -------------------------------------------------------------------------------- /app/src/main/assets/models/ear_fur.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/ear_fur.png -------------------------------------------------------------------------------- /app/src/main/assets/models/forehead_left.obj: -------------------------------------------------------------------------------- 1 | # This file uses centimeters as units for non-parametric coordinates. 
2 | 3 | mtllib forehead_left.mtl 4 | g skinCluster1Set tweakSet1 5 | v 0.088114 0.052146 0.025014 6 | v 0.034652 0.109164 0.007964 7 | v 0.046345 -0.000129 -0.018835 8 | v -0.007117 0.056890 -0.035885 9 | vt 0.005966 0.994333 10 | vt 0.993035 0.994333 11 | vt 0.005966 0.006373 12 | vt 0.993035 0.006373 13 | vn -0.529919 -0.255013 0.808798 14 | vn -0.529919 -0.255013 0.808798 15 | vn -0.529919 -0.255013 0.808798 16 | vn -0.529919 -0.255013 0.808798 17 | s 1 18 | g earLeft_mesh asset 19 | usemtl earLeft_meshSG 20 | f 4/4/1 1/1/2 2/2/3 21 | f 1/1/2 4/4/1 3/3/4 22 | -------------------------------------------------------------------------------- /app/src/main/assets/models/forehead_right.obj: -------------------------------------------------------------------------------- 1 | # This file uses centimeters as units for non-parametric coordinates. 2 | 3 | mtllib forehead_right.mtl 4 | g skinCluster2Set tweakSet2 5 | v -0.046329 -0.000137 -0.018811 6 | v 0.007133 0.056882 -0.035861 7 | v -0.088098 0.052138 0.025037 8 | v -0.034636 0.109157 0.007988 9 | vt 0.002194 0.001364 10 | vt 0.995606 0.001364 11 | vt 0.002194 0.996801 12 | vt 0.995606 0.996801 13 | vn 0.529919 -0.255013 0.808798 14 | vn 0.529919 -0.255013 0.808798 15 | vn 0.529919 -0.255013 0.808798 16 | vn 0.529919 -0.255013 0.808798 17 | s 1 18 | g earRight_mesh asset 19 | usemtl earLeft_meshSG 20 | f 3/3/1 2/2/2 4/4/3 21 | f 2/2/2 3/3/1 1/1/4 22 | -------------------------------------------------------------------------------- /app/src/main/assets/models/freckles.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/freckles.png -------------------------------------------------------------------------------- /app/src/main/assets/models/map_quality_bar.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/map_quality_bar.png -------------------------------------------------------------------------------- /app/src/main/assets/models/nose_fur.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/nose_fur.png -------------------------------------------------------------------------------- /app/src/main/assets/models/trigrid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/assets/models/trigrid.png -------------------------------------------------------------------------------- /app/src/main/assets/shaders/ar_object.frag: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | precision mediump float; 18 | 19 | uniform sampler2D u_Texture; 20 | 21 | uniform vec4 u_LightingParameters; 22 | uniform vec4 u_MaterialParameters; 23 | uniform vec4 u_ColorCorrectionParameters; 24 | 25 | #if USE_DEPTH_FOR_OCCLUSION 26 | uniform sampler2D u_DepthTexture; 27 | uniform mat3 u_DepthUvTransform; 28 | uniform float u_DepthAspectRatio; 29 | #endif // USE_DEPTH_FOR_OCCLUSION 30 | 31 | varying vec3 v_ViewPosition; 32 | varying vec3 v_ViewNormal; 33 | varying vec2 v_TexCoord; 34 | varying vec3 v_ScreenSpacePosition; 35 | uniform vec4 u_ObjColor; 36 | 37 | #if USE_DEPTH_FOR_OCCLUSION 38 | 39 | float DepthGetMillimeters(in sampler2D depth_texture, in vec2 depth_uv) { 40 | // Depth is packed into the red and green components of its texture. 41 | // The texture is a normalized format, storing millimeters. 42 | vec3 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).xyz; 43 | return dot(packedDepthAndVisibility.xy, vec2(255.0, 256.0 * 255.0)); 44 | } 45 | 46 | // Returns linear interpolation position of value between min and max bounds. 47 | // E.g., DepthInverseLerp(1100, 1000, 2000) returns 0.1. 48 | float DepthInverseLerp(in float value, in float min_bound, in float max_bound) { 49 | return clamp((value - min_bound) / (max_bound - min_bound), 0.0, 1.0); 50 | } 51 | 52 | // Returns a value between 0.0 (not visible) and 1.0 (completely visible) 53 | // Which represents how visible or occluded is the pixel in relation to the 54 | // depth map. 55 | float DepthGetVisibility(in sampler2D depth_texture, in vec2 depth_uv, 56 | in float asset_depth_mm) { 57 | float depth_mm = DepthGetMillimeters(depth_texture, depth_uv); 58 | 59 | // Instead of a hard z-buffer test, allow the asset to fade into the 60 | // background along a 2 * kDepthTolerancePerMm * asset_depth_mm 61 | // range centered on the background depth. 
62 | const float kDepthTolerancePerMm = 0.015; 63 | float visibility_occlusion = clamp(0.5 * (depth_mm - asset_depth_mm) / 64 | (kDepthTolerancePerMm * asset_depth_mm) + 0.5, 0.0, 1.0); 65 | 66 | // Depth close to zero is most likely invalid, do not use it for occlusions. 67 | float visibility_depth_near = 1.0 - DepthInverseLerp( 68 | depth_mm, /*min_depth_mm=*/150.0, /*max_depth_mm=*/200.0); 69 | 70 | // Same for very high depth values. 71 | float visibility_depth_far = DepthInverseLerp( 72 | depth_mm, /*min_depth_mm=*/7500.0, /*max_depth_mm=*/8000.0); 73 | 74 | const float kOcclusionAlpha = 0.0; 75 | float visibility = 76 | max(max(visibility_occlusion, kOcclusionAlpha), 77 | max(visibility_depth_near, visibility_depth_far)); 78 | 79 | return visibility; 80 | } 81 | 82 | float DepthGetBlurredVisibilityAroundUV(in sampler2D depth_texture, in vec2 uv, 83 | in float asset_depth_mm) { 84 | // Kernel used: 85 | // 0 4 7 4 0 86 | // 4 16 26 16 4 87 | // 7 26 41 26 7 88 | // 4 16 26 16 4 89 | // 0 4 7 4 0 90 | const float kKernelTotalWeights = 269.0; 91 | float sum = 0.0; 92 | 93 | const float kOcclusionBlurAmount = 0.01; 94 | vec2 blurriness = vec2(kOcclusionBlurAmount, 95 | kOcclusionBlurAmount * u_DepthAspectRatio); 96 | 97 | float current = 0.0; 98 | 99 | current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, -2.0) * blurriness, asset_depth_mm); 100 | current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, -2.0) * blurriness, asset_depth_mm); 101 | current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, +2.0) * blurriness, asset_depth_mm); 102 | current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, +2.0) * blurriness, asset_depth_mm); 103 | current += DepthGetVisibility(depth_texture, uv + vec2(-2.0, +1.0) * blurriness, asset_depth_mm); 104 | current += DepthGetVisibility(depth_texture, uv + vec2(+2.0, +1.0) * blurriness, asset_depth_mm); 105 | current += DepthGetVisibility(depth_texture, uv + vec2(-2.0, -1.0) * blurriness, asset_depth_mm); 
106 | current += DepthGetVisibility(depth_texture, uv + vec2(+2.0, -1.0) * blurriness, asset_depth_mm); 107 | sum += current * 4.0; 108 | 109 | current = 0.0; 110 | current += DepthGetVisibility(depth_texture, uv + vec2(-2.0, -0.0) * blurriness, asset_depth_mm); 111 | current += DepthGetVisibility(depth_texture, uv + vec2(+2.0, +0.0) * blurriness, asset_depth_mm); 112 | current += DepthGetVisibility(depth_texture, uv + vec2(+0.0, +2.0) * blurriness, asset_depth_mm); 113 | current += DepthGetVisibility(depth_texture, uv + vec2(-0.0, -2.0) * blurriness, asset_depth_mm); 114 | sum += current * 7.0; 115 | 116 | current = 0.0; 117 | current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, -1.0) * blurriness, asset_depth_mm); 118 | current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, -1.0) * blurriness, asset_depth_mm); 119 | current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, +1.0) * blurriness, asset_depth_mm); 120 | current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, +1.0) * blurriness, asset_depth_mm); 121 | sum += current * 16.0; 122 | 123 | current = 0.0; 124 | current += DepthGetVisibility(depth_texture, uv + vec2(+0.0, +1.0) * blurriness, asset_depth_mm); 125 | current += DepthGetVisibility(depth_texture, uv + vec2(-0.0, -1.0) * blurriness, asset_depth_mm); 126 | current += DepthGetVisibility(depth_texture, uv + vec2(-1.0, -0.0) * blurriness, asset_depth_mm); 127 | current += DepthGetVisibility(depth_texture, uv + vec2(+1.0, +0.0) * blurriness, asset_depth_mm); 128 | sum += current * 26.0; 129 | 130 | sum += DepthGetVisibility(depth_texture, uv , asset_depth_mm) * 41.0; 131 | 132 | return sum / kKernelTotalWeights; 133 | } 134 | 135 | #endif // USE_DEPTH_FOR_OCCLUSION 136 | 137 | void main() { 138 | // We support approximate sRGB gamma. 139 | const float kGamma = 0.4545454; 140 | const float kInverseGamma = 2.2; 141 | const float kMiddleGrayGamma = 0.466; 142 | 143 | // Unpack lighting and material parameters for better naming. 
144 | vec3 viewLightDirection = u_LightingParameters.xyz; 145 | vec3 colorShift = u_ColorCorrectionParameters.rgb; 146 | float averagePixelIntensity = u_ColorCorrectionParameters.a; 147 | 148 | float materialAmbient = u_MaterialParameters.x; 149 | float materialDiffuse = u_MaterialParameters.y; 150 | float materialSpecular = u_MaterialParameters.z; 151 | float materialSpecularPower = u_MaterialParameters.w; 152 | 153 | // Normalize varying parameters, because they are linearly interpolated in the vertex shader. 154 | vec3 viewFragmentDirection = normalize(v_ViewPosition); 155 | vec3 viewNormal = normalize(v_ViewNormal); 156 | 157 | // Flip the y-texture coordinate to address the texture from top-left. 158 | vec4 objectColor = texture2D(u_Texture, vec2(v_TexCoord.x, 1.0 - v_TexCoord.y)); 159 | 160 | // Apply color to grayscale image only if the alpha of u_ObjColor is 161 | // greater and equal to 255.0. 162 | objectColor.rgb *= mix(vec3(1.0), u_ObjColor.rgb / 255.0, 163 | step(255.0, u_ObjColor.a)); 164 | 165 | // Apply inverse SRGB gamma to the texture before making lighting calculations. 166 | objectColor.rgb = pow(objectColor.rgb, vec3(kInverseGamma)); 167 | 168 | // Ambient light is unaffected by the light intensity. 169 | float ambient = materialAmbient; 170 | 171 | // Approximate a hemisphere light (not a harsh directional light). 172 | float diffuse = materialDiffuse * 173 | 0.5 * (dot(viewNormal, viewLightDirection) + 1.0); 174 | 175 | // Compute specular light. Textures are loaded with with premultiplied alpha 176 | // (https://developer.android.com/reference/android/graphics/BitmapFactory.Options#inPremultiplied), 177 | // so premultiply the specular color by alpha as well. 
178 | vec3 reflectedLightDirection = reflect(viewLightDirection, viewNormal); 179 | float specularStrength = max(0.0, dot(viewFragmentDirection, reflectedLightDirection)); 180 | float specular = objectColor.a * materialSpecular * 181 | pow(specularStrength, materialSpecularPower); 182 | 183 | vec3 color = objectColor.rgb * (ambient + diffuse) + specular; 184 | // Apply SRGB gamma before writing the fragment color. 185 | color.rgb = pow(color, vec3(kGamma)); 186 | // Apply average pixel intensity and color shift 187 | color *= colorShift * (averagePixelIntensity / kMiddleGrayGamma); 188 | gl_FragColor.rgb = color; 189 | gl_FragColor.a = objectColor.a; 190 | 191 | #if USE_DEPTH_FOR_OCCLUSION 192 | const float kMetersToMillimeters = 1000.0; 193 | float asset_depth_mm = v_ViewPosition.z * kMetersToMillimeters * -1.; 194 | // Computes the texture coordinates to sample from the depth image. 195 | vec2 depth_uvs = (u_DepthUvTransform * vec3(v_ScreenSpacePosition.xy, 1)).xy; 196 | 197 | // The following step is very costly. Replace the last line with the 198 | // commented line if it's too expensive. 199 | // gl_FragColor *= DepthGetVisibility(u_DepthTexture, depth_uvs, asset_depth_mm); 200 | gl_FragColor *= DepthGetBlurredVisibilityAroundUV(u_DepthTexture, depth_uvs, asset_depth_mm); 201 | #endif // USE_DEPTH_FOR_OCCLUSION 202 | } 203 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/ar_object.vert: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | uniform mat4 u_ModelView; 18 | uniform mat4 u_ModelViewProjection; 19 | 20 | attribute vec4 a_Position; 21 | attribute vec3 a_Normal; 22 | attribute vec2 a_TexCoord; 23 | 24 | varying vec3 v_ViewPosition; 25 | varying vec3 v_ViewNormal; 26 | varying vec2 v_TexCoord; 27 | varying vec3 v_ScreenSpacePosition; 28 | 29 | void main() { 30 | v_ViewPosition = (u_ModelView * a_Position).xyz; 31 | v_ViewNormal = normalize((u_ModelView * vec4(a_Normal, 0.0)).xyz); 32 | v_TexCoord = a_TexCoord; 33 | gl_Position = u_ModelViewProjection * a_Position; 34 | v_ScreenSpacePosition = gl_Position.xyz / gl_Position.w; 35 | } 36 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/background_show_depth_color_visualization.frag: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | precision mediump float; 18 | 19 | uniform sampler2D u_DepthTexture; 20 | 21 | varying vec2 v_TexCoord; 22 | 23 | const highp float kMaxDepth = 8000.0; // In millimeters. 24 | 25 | float DepthGetMillimeters(in sampler2D depth_texture, in vec2 depth_uv) { 26 | // Depth is packed into the red and green components of its texture. 27 | // The texture is a normalized format, storing millimeters. 28 | vec3 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).xyz; 29 | return dot(packedDepthAndVisibility.xy, vec2(255.0, 256.0 * 255.0)); 30 | } 31 | 32 | // Returns a color corresponding to the depth passed in. Colors range from red 33 | // to green to blue, where red is closest and blue is farthest. 34 | // 35 | // Uses Turbo color mapping: 36 | // https://ai.googleblog.com/2019/08/turbo-improved-rainbow-colormap-for.html 37 | vec3 DepthGetColorVisualization(in float x) { 38 | const vec4 kRedVec4 = vec4(0.55305649, 3.00913185, -5.46192616, -11.11819092); 39 | const vec4 kGreenVec4 = vec4(0.16207513, 0.17712472, 15.24091500, -36.50657960); 40 | const vec4 kBlueVec4 = vec4(-0.05195877, 5.18000081, -30.94853351, 81.96403246); 41 | const vec2 kRedVec2 = vec2(27.81927491, -14.87899417); 42 | const vec2 kGreenVec2 = vec2(25.95549545, -5.02738237); 43 | const vec2 kBlueVec2 = vec2(-86.53476570, 30.23299484); 44 | const float kInvalidDepthThreshold = 0.01; 45 | 46 | // Adjusts color space via 6 degree poly interpolation to avoid pure red. 
47 | x = clamp(x * 0.9 + 0.03, 0.0, 1.0); 48 | vec4 v4 = vec4(1.0, x, x * x, x * x * x); 49 | vec2 v2 = v4.zw * v4.z; 50 | vec3 polynomial_color = vec3( 51 | dot(v4, kRedVec4) + dot(v2, kRedVec2), 52 | dot(v4, kGreenVec4) + dot(v2, kGreenVec2), 53 | dot(v4, kBlueVec4) + dot(v2, kBlueVec2) 54 | ); 55 | 56 | return step(kInvalidDepthThreshold, x) * polynomial_color; 57 | } 58 | 59 | void main() { 60 | highp float normalized_depth = 61 | clamp(DepthGetMillimeters(u_DepthTexture, v_TexCoord.xy) / kMaxDepth, 62 | 0.0, 1.0); 63 | vec4 depth_color = vec4(DepthGetColorVisualization(normalized_depth), 1.0); 64 | gl_FragColor = depth_color; 65 | } 66 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/background_show_depth_color_visualization.vert: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | attribute vec4 a_Position; 18 | attribute vec2 a_TexCoord; 19 | 20 | varying vec2 v_TexCoord; 21 | 22 | void main() { 23 | v_TexCoord = a_TexCoord; 24 | gl_Position = a_Position; 25 | } 26 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/object.frag: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | precision mediump float; 18 | 19 | uniform sampler2D u_Texture; 20 | 21 | uniform vec4 u_LightingParameters; 22 | uniform vec4 u_MaterialParameters; 23 | uniform vec4 u_ColorCorrectionParameters; 24 | 25 | varying vec3 v_ViewPosition; 26 | varying vec3 v_ViewNormal; 27 | varying vec2 v_TexCoord; 28 | uniform vec4 u_ObjColor; 29 | 30 | void main() { 31 | 32 | gl_FragColor.rgb = vec3(0.0,1.0,0.0); 33 | return; 34 | 35 | 36 | // We support approximate sRGB gamma. 37 | const float kGamma = 0.4545454; 38 | const float kInverseGamma = 2.2; 39 | const float kMiddleGrayGamma = 0.466; 40 | 41 | // Unpack lighting and material parameters for better naming. 
42 | vec3 viewLightDirection = u_LightingParameters.xyz; 43 | vec3 colorShift = u_ColorCorrectionParameters.rgb; 44 | float averagePixelIntensity = u_ColorCorrectionParameters.a; 45 | 46 | float materialAmbient = u_MaterialParameters.x; 47 | float materialDiffuse = u_MaterialParameters.y; 48 | float materialSpecular = u_MaterialParameters.z; 49 | float materialSpecularPower = u_MaterialParameters.w; 50 | 51 | // Normalize varying parameters, because they are linearly interpolated in the vertex shader. 52 | vec3 viewFragmentDirection = normalize(v_ViewPosition); 53 | vec3 viewNormal = normalize(v_ViewNormal); 54 | 55 | // Flip the y-texture coordinate to address the texture from top-left. 56 | vec4 objectColor = texture2D(u_Texture, vec2(v_TexCoord.x, 1.0 - v_TexCoord.y)); 57 | 58 | // Apply color to grayscale image only if the alpha of u_ObjColor is 59 | // greater and equal to 255.0. 60 | objectColor.rgb *= mix(vec3(1.0), u_ObjColor.rgb / 255.0, 61 | step(255.0, u_ObjColor.a)); 62 | 63 | // Apply inverse SRGB gamma to the texture before making lighting calculations. 64 | objectColor.rgb = pow(objectColor.rgb, vec3(kInverseGamma)); 65 | 66 | // Ambient light is unaffected by the light intensity. 67 | float ambient = materialAmbient; 68 | 69 | // Approximate a hemisphere light (not a harsh directional light). 70 | float diffuse = materialDiffuse * 71 | 0.5 * (dot(viewNormal, viewLightDirection) + 1.0); 72 | 73 | // Compute specular light. 74 | vec3 reflectedLightDirection = reflect(viewLightDirection, viewNormal); 75 | float specularStrength = max(0.0, dot(viewFragmentDirection, reflectedLightDirection)); 76 | float specular = materialSpecular * 77 | pow(specularStrength, materialSpecularPower); 78 | 79 | vec3 color = objectColor.rgb * (ambient + diffuse) + specular; 80 | // Apply SRGB gamma before writing the fragment color. 
81 | color.rgb = pow(color, vec3(kGamma)); 82 | // Apply average pixel intensity and color shift 83 | color *= colorShift * (averagePixelIntensity / kMiddleGrayGamma); 84 | //gl_FragColor.rgb = color; 85 | gl_FragColor.a = objectColor.a; 86 | 87 | gl_FragColor.rgb = vec3(0.0,1.0,0.0); 88 | // gl_FragColor.a = 1.0; 89 | 90 | } 91 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/object.vert: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | uniform mat4 u_ModelView; 18 | uniform mat4 u_ModelViewProjection; 19 | 20 | attribute vec4 a_Position; 21 | attribute vec3 a_Normal; 22 | attribute vec2 a_TexCoord; 23 | 24 | varying vec3 v_ViewPosition; 25 | varying vec3 v_ViewNormal; 26 | varying vec2 v_TexCoord; 27 | 28 | void main() { 29 | v_ViewPosition = (u_ModelView * a_Position).xyz; 30 | v_ViewNormal = normalize((u_ModelView * vec4(a_Normal, 0.0)).xyz); 31 | v_TexCoord = a_TexCoord; 32 | gl_Position = u_ModelViewProjection * a_Position; 33 | gl_PointSize = 10.0; 34 | } 35 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/plane.frag: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | precision highp float; 18 | uniform sampler2D u_Texture; 19 | uniform vec4 u_gridControl; // dotThreshold, lineThreshold, lineFadeShrink, occlusionShrink 20 | varying vec3 v_TexCoordAlpha; 21 | 22 | void main() { 23 | vec4 control = texture2D(u_Texture, v_TexCoordAlpha.xy); 24 | float dotScale = v_TexCoordAlpha.z; 25 | float lineFade = max(0.0, u_gridControl.z * v_TexCoordAlpha.z - (u_gridControl.z - 1.0)); 26 | float alpha = (control.r * dotScale > u_gridControl.x) ? 1.0 27 | : (control.g > u_gridControl.y) ? 
lineFade 28 | : (0.25 * lineFade); 29 | gl_FragColor = vec4(alpha * v_TexCoordAlpha.z); 30 | } 31 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/plane.vert: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | uniform mat4 u_Model; 18 | uniform mat4 u_ModelViewProjection; 19 | uniform mat2 u_PlaneUvMatrix; 20 | uniform vec3 u_Normal; 21 | 22 | attribute vec3 a_XZPositionAlpha; // (x, z, alpha) 23 | 24 | varying vec3 v_TexCoordAlpha; 25 | 26 | void main() { 27 | vec4 local_pos = vec4(a_XZPositionAlpha.x, 0.0, a_XZPositionAlpha.y, 1.0); 28 | vec4 world_pos = u_Model * local_pos; 29 | 30 | // Construct two vectors that are orthogonal to the normal. 31 | // This arbitrary choice is not co-linear with either horizontal 32 | // or vertical plane normals. 33 | const vec3 arbitrary = vec3(1.0, 1.0, 0.0); 34 | vec3 vec_u = normalize(cross(u_Normal, arbitrary)); 35 | vec3 vec_v = normalize(cross(u_Normal, vec_u)); 36 | 37 | // Project vertices in world frame onto vec_u and vec_v. 
38 | vec2 uv = vec2(dot(world_pos.xyz, vec_u), dot(world_pos.xyz, vec_v)); 39 | v_TexCoordAlpha = vec3(u_PlaneUvMatrix * uv, a_XZPositionAlpha.z); 40 | gl_Position = u_ModelViewProjection * local_pos; 41 | } 42 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/point_cloud.frag: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | precision mediump float; 18 | varying vec4 v_Color; 19 | 20 | void main() { 21 | gl_FragColor = v_Color; 22 | } 23 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/point_cloud.vert: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | uniform mat4 u_ModelViewProjection; 18 | uniform vec4 u_Color; 19 | uniform float u_PointSize; 20 | 21 | attribute vec4 a_Position; 22 | 23 | varying vec4 v_Color; 24 | 25 | void main() { 26 | v_Color = u_Color; 27 | gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0); 28 | gl_PointSize = u_PointSize; 29 | } 30 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/screenquad.frag: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | #extension GL_OES_EGL_image_external : require 17 | 18 | precision mediump float; 19 | varying vec2 v_TexCoord; 20 | uniform samplerExternalOES sTexture; 21 | 22 | 23 | void main() { 24 | gl_FragColor = texture2D(sTexture, v_TexCoord); 25 | } 26 | -------------------------------------------------------------------------------- /app/src/main/assets/shaders/screenquad.vert: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | attribute vec4 a_Position; 18 | attribute vec2 a_TexCoord; 19 | 20 | varying vec2 v_TexCoord; 21 | 22 | void main() { 23 | gl_Position = a_Position; 24 | v_TexCoord = a_TexCoord; 25 | } 26 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/augmentedfaces/AugmentedFaceRenderer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.google.ar.core.examples.java.augmentedfaces; 18 | 19 | import static com.google.ar.core.examples.java.common.rendering.ShaderUtil.loadGLShader; 20 | 21 | import android.content.Context; 22 | import android.graphics.Bitmap; 23 | import android.graphics.BitmapFactory; 24 | import android.opengl.GLES20; 25 | import android.opengl.GLUtils; 26 | import android.opengl.Matrix; 27 | import com.google.ar.core.AugmentedFace; 28 | import java.io.IOException; 29 | import java.net.ServerSocket; 30 | import java.nio.FloatBuffer; 31 | import java.nio.ShortBuffer; 32 | 33 | /** Renders an AugmentedFace on screen in OpenGL. */ 34 | public class AugmentedFaceRenderer { 35 | private static final String TAG = AugmentedFaceRenderer.class.getSimpleName(); 36 | 37 | private int modelViewUniform; 38 | private int modelViewProjectionUniform; 39 | 40 | private int textureUniform; 41 | 42 | private int lightingParametersUniform; 43 | 44 | private int materialParametersUniform; 45 | 46 | private int colorCorrectionParameterUniform; 47 | 48 | private int tintColorUniform; 49 | 50 | private int attriVertices; 51 | private int attriUvs; 52 | private int attriNormals; 53 | 54 | // Set some default material properties to use for lighting. 
55 | private float ambient = 0.3f; 56 | private float diffuse = 1.0f; 57 | private float specular = 1.0f; 58 | private float specularPower = 6.0f; 59 | 60 | private final int[] textureId = new int[1]; 61 | 62 | private static final float[] lightDirection = new float[] {0.0f, 1.0f, 0.0f, 0.0f}; 63 | private static final String VERTEX_SHADER_NAME = "shaders/object.vert"; 64 | private static final String FRAGMENT_SHADER_NAME = "shaders/object.frag"; 65 | private int program; 66 | private final float[] modelViewProjectionMat = new float[16]; 67 | private final float[] modelViewMat = new float[16]; 68 | private final float[] viewLightDirection = new float[4]; 69 | 70 | public AugmentedFaceRenderer() {} 71 | 72 | public void createOnGlThread(Context context, String diffuseTextureAssetName) throws IOException { 73 | final int vertexShader = 74 | loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME); 75 | final int fragmentShader = 76 | loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME); 77 | 78 | program = GLES20.glCreateProgram(); 79 | GLES20.glAttachShader(program, vertexShader); 80 | GLES20.glAttachShader(program, fragmentShader); 81 | GLES20.glLinkProgram(program); 82 | 83 | modelViewProjectionUniform = GLES20.glGetUniformLocation(program, "u_ModelViewProjection"); 84 | modelViewUniform = GLES20.glGetUniformLocation(program, "u_ModelView"); 85 | textureUniform = GLES20.glGetUniformLocation(program, "u_Texture"); 86 | 87 | lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightningParameters"); 88 | materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters"); 89 | colorCorrectionParameterUniform = 90 | GLES20.glGetUniformLocation(program, "u_ColorCorrectionParameters"); 91 | tintColorUniform = GLES20.glGetUniformLocation(program, "u_TintColor"); 92 | 93 | attriVertices = GLES20.glGetAttribLocation(program, "a_Position"); 94 | attriUvs = GLES20.glGetAttribLocation(program, 
"a_TexCoord"); 95 | attriNormals = GLES20.glGetAttribLocation(program, "a_Normal"); 96 | 97 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 98 | GLES20.glGenTextures(1, textureId, 0); 99 | loadTexture(context, textureId, diffuseTextureAssetName); 100 | } 101 | 102 | private static void loadTexture(Context context, int[] textureId, String filename) 103 | throws IOException { 104 | Bitmap textureBitmap = BitmapFactory.decodeStream(context.getAssets().open(filename)); 105 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[0]); 106 | GLES20.glTexParameteri( 107 | GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR); 108 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); 109 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0); 110 | GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D); 111 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 112 | 113 | textureBitmap.recycle(); 114 | } 115 | 116 | public void draw( 117 | float[] projmtx, 118 | float[] viewmtx, 119 | float[] modelmtx, 120 | float[] colorCorrectionRgba, 121 | AugmentedFace face, 122 | ServerSocket SS) { 123 | 124 | 125 | 126 | FloatBuffer vertices = face.getMeshVertices(); 127 | 128 | 129 | 130 | FloatBuffer normals = face.getMeshNormals(); 131 | FloatBuffer textureCoords = face.getMeshTextureCoordinates(); 132 | ShortBuffer triangleIndices = face.getMeshTriangleIndices(); 133 | GLES20.glUseProgram(program); 134 | GLES20.glDepthMask(false); 135 | 136 | float[] modelViewProjectionMatTemp = new float[16]; 137 | Matrix.multiplyMM(modelViewProjectionMatTemp, 0, projmtx, 0, viewmtx, 0); 138 | Matrix.multiplyMM(modelViewProjectionMat, 0, modelViewProjectionMatTemp, 0, modelmtx, 0); 139 | Matrix.multiplyMM(modelViewMat, 0, viewmtx, 0, modelmtx, 0); 140 | 141 | // Set the lighting environment properties. 
142 | Matrix.multiplyMV(viewLightDirection, 0, modelViewMat, 0, lightDirection, 0); 143 | normalizeVec3(viewLightDirection); 144 | 145 | GLES20.glUniform4f( 146 | lightingParametersUniform, 147 | viewLightDirection[0], 148 | viewLightDirection[1], 149 | viewLightDirection[2], 150 | 1.f); 151 | //GLES20.glUniform4fv(colorCorrectionParameterUniform, 1, colorCorrectionRgba, 0); 152 | 153 | // Set the object material properties. 154 | GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower); 155 | 156 | // Set the ModelViewProjection matrix in the shader. 157 | GLES20.glUniformMatrix4fv(modelViewUniform, 1, false, modelViewMat, 0); 158 | GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjectionMat, 0); 159 | 160 | GLES20.glEnableVertexAttribArray(attriVertices); 161 | GLES20.glVertexAttribPointer(attriVertices, 3, GLES20.GL_FLOAT, false, 0, vertices); 162 | 163 | // GLES20.glEnableVertexAttribArray(attriNormals); 164 | // GLES20.glVertexAttribPointer(attriNormals, 3, GLES20.GL_FLOAT, false, 0, normals); 165 | 166 | // GLES20.glEnableVertexAttribArray(attriUvs); 167 | // GLES20.glVertexAttribPointer(attriUvs, 2, GLES20.GL_FLOAT, false, 0, textureCoords); 168 | 169 | // GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 170 | // GLES20.glUniform1i(textureUniform, 0); 171 | 172 | // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[0]); 173 | // GLES20.glUniform4f(tintColorUniform, 0, 0, 0, 0); 174 | //GLES20.glEnable(GLES20.GL_BLEND); 175 | 176 | // Textures are loaded with premultiplied alpha 177 | // (https://developer.android.com/reference/android/graphics/BitmapFactory.Options#inPremultiplied), 178 | // so we use the premultiplied alpha blend factors. 
179 | // GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA); 180 | //GLES20.gl 181 | GLES20.glDrawArrays(GLES20.GL_POINTS,0,vertices.limit()/3); 182 | //GLES20.glPolygonMode(GLES20.GL_FRONT_AND_BACK , GLES20.GL_LINE); 183 | // GLES20.glDrawElements( 184 | // GLES20.GL_TRIANGLES, triangleIndices.limit(), GLES20.GL_UNSIGNED_SHORT, triangleIndices); 185 | //GLES20.glPolygonMode(GLES20.GL_FRONT_AND_BACK, GLES20.GL_FILL); 186 | GLES20.glUseProgram(0); 187 | // GLES20.glDepthMask(true); 188 | 189 | /* 190 | ServerMessage SM = new ServerMessage(vertices,SS); 191 | 192 | MessageSender Sender = new MessageSender(); 193 | Sender.execute(SM); 194 | */ 195 | } 196 | 197 | public void setMaterialProperties( 198 | float ambient, float diffuse, float specular, float specularPower) { 199 | this.ambient = ambient; 200 | this.diffuse = diffuse; 201 | this.specular = specular; 202 | this.specularPower = specularPower; 203 | } 204 | 205 | private static void normalizeVec3(float[] v) { 206 | float reciprocalLength = 1.0f / (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]); 207 | v[0] *= reciprocalLength; 208 | v[1] *= reciprocalLength; 209 | v[2] *= reciprocalLength; 210 | } 211 | } 212 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/augmentedfaces/AugmentedFacesActivity.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.google.ar.core.examples.java.augmentedfaces; 18 | 19 | import android.content.ComponentName; 20 | import android.content.Intent; 21 | import android.content.ServiceConnection; 22 | import android.opengl.GLES20; 23 | import android.opengl.GLSurfaceView; 24 | import android.os.AsyncTask; 25 | import android.os.Bundle; 26 | import android.os.IBinder; 27 | import android.os.Message; 28 | import android.os.Messenger; 29 | import android.os.RemoteException; 30 | import android.util.Log; 31 | import android.widget.EditText; 32 | import android.widget.Toast; 33 | import androidx.appcompat.app.AppCompatActivity; 34 | import android.net.wifi.WifiManager; 35 | import android.content.Context; 36 | 37 | import com.google.ar.core.ArCoreApk; 38 | import com.google.ar.core.AugmentedFace; 39 | import com.google.ar.core.AugmentedFace.RegionType; 40 | import com.google.ar.core.Camera; 41 | import com.google.ar.core.CameraConfig; 42 | import com.google.ar.core.CameraConfigFilter; 43 | import com.google.ar.core.Config; 44 | import com.google.ar.core.Config.AugmentedFaceMode; 45 | import com.google.ar.core.Frame; 46 | import com.google.ar.core.Session; 47 | import com.google.ar.core.TrackingState; 48 | import com.google.ar.core.examples.java.common.helpers.CameraPermissionHelper; 49 | import com.google.ar.core.examples.java.common.helpers.DisplayRotationHelper; 50 | import com.google.ar.core.examples.java.common.helpers.FullScreenHelper; 51 | import com.google.ar.core.examples.java.common.helpers.SnackbarHelper; 52 | import com.google.ar.core.examples.java.common.helpers.TrackingStateHelper; 53 | import com.google.ar.core.examples.java.common.rendering.BackgroundRenderer; 54 | import com.google.ar.core.examples.java.common.rendering.ObjectRenderer; 55 | import com.google.ar.core.exceptions.CameraNotAvailableException; 56 | import 
com.google.ar.core.exceptions.UnavailableApkTooOldException; 57 | import com.google.ar.core.exceptions.UnavailableArcoreNotInstalledException; 58 | import com.google.ar.core.exceptions.UnavailableDeviceNotCompatibleException; 59 | import com.google.ar.core.exceptions.UnavailableSdkTooOldException; 60 | import com.google.ar.core.exceptions.UnavailableUserDeclinedInstallationException; 61 | 62 | import java.io.ByteArrayOutputStream; 63 | import java.io.IOException; 64 | import java.net.Socket; 65 | import java.net.UnknownHostException; 66 | import java.nio.ByteBuffer; 67 | import java.nio.FloatBuffer; 68 | import java.nio.ShortBuffer; 69 | import java.util.Collection; 70 | import java.util.EnumSet; 71 | import java.util.List; 72 | import javax.microedition.khronos.egl.EGLConfig; 73 | import javax.microedition.khronos.opengles.GL10; 74 | 75 | import java.net.InetAddress; 76 | import java.math.BigInteger; 77 | import java.nio.ByteOrder; 78 | import java.io.BufferedReader; 79 | import java.io.BufferedWriter; 80 | import java.io.IOException; 81 | import java.io.InputStreamReader; 82 | import java.io.InterruptedIOException; 83 | import java.io.OutputStreamWriter; 84 | import java.net.ServerSocket; 85 | import java.net.Socket; 86 | import java.util.concurrent.atomic.AtomicBoolean; 87 | 88 | import android.app.Activity; 89 | import android.os.Bundle; 90 | import android.util.Log; 91 | import android.widget.TextView; 92 | 93 | 94 | /** 95 | * This is a simple example that shows how to create an augmented reality (AR) application using the 96 | * ARCore API. The application will display any detected planes and will allow the user to tap on a 97 | * plane to place a 3d model of the Android robot. 98 | */ 99 | public class AugmentedFacesActivity extends AppCompatActivity implements GLSurfaceView.Renderer { 100 | private static final String TAG = AugmentedFacesActivity.class.getSimpleName(); 101 | 102 | // Rendering. 
The Renderers are created here, and initialized when the GL surface is created. 103 | private GLSurfaceView surfaceView; 104 | 105 | private boolean installRequested; 106 | 107 | private Session session; 108 | private final SnackbarHelper messageSnackbarHelper = new SnackbarHelper(); 109 | private DisplayRotationHelper displayRotationHelper; 110 | private final TrackingStateHelper trackingStateHelper = new TrackingStateHelper(this); 111 | 112 | private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer(); 113 | private final AugmentedFaceRenderer augmentedFaceRenderer = new AugmentedFaceRenderer(); 114 | private final ObjectRenderer noseObject = new ObjectRenderer(); 115 | private final ObjectRenderer rightEarObject = new ObjectRenderer(); 116 | private final ObjectRenderer leftEarObject = new ObjectRenderer(); 117 | // Temporary matrix allocated here to reduce number of allocations for each frame. 118 | private final float[] noseMatrix = new float[16]; 119 | private final float[] rightEarMatrix = new float[16]; 120 | private final float[] leftEarMatrix = new float[16]; 121 | private static final float[] DEFAULT_COLOR = new float[] {0f, 0f, 0f, 0f}; 122 | 123 | private static final int TCP_SERVER_PORT = 9886; 124 | 125 | 126 | private String getLocalWifiIpAddress() { 127 | WifiManager wifiManager = (WifiManager) getApplicationContext().getSystemService(WIFI_SERVICE); 128 | 129 | String ipAddressString = "Hello World"; 130 | 131 | if(wifiManager==null) 132 | { 133 | ipAddressString = "wifiManager not found"; 134 | return ipAddressString; 135 | } 136 | 137 | 138 | int ipAddress = wifiManager.getConnectionInfo().getIpAddress(); 139 | 140 | if (ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN)) { 141 | ipAddress = Integer.reverseBytes(ipAddress); 142 | } 143 | 144 | byte[] ipByteArray = BigInteger.valueOf(ipAddress).toByteArray(); 145 | 146 | 147 | try { 148 | ipAddressString = InetAddress.getByAddress(ipByteArray).getHostAddress(); 149 | } 
catch (UnknownHostException ex) { 150 | ipAddressString = null; 151 | ex.printStackTrace(); 152 | } 153 | 154 | return ipAddressString; 155 | } 156 | 157 | EditText e1; 158 | ServerSocket ServerSoc; 159 | Intent TCPServerServiceIntent; 160 | 161 | Messenger mServerMessenger = null; 162 | boolean ServiceBound = false; 163 | 164 | private ServiceConnection ServerServiceConnection = new ServiceConnection() 165 | { 166 | @Override 167 | public void onServiceDisconnected(ComponentName componentName) { 168 | 169 | mServerMessenger=null; 170 | ServiceBound=false; 171 | 172 | } 173 | 174 | @Override 175 | public void onServiceConnected(ComponentName componentName, IBinder iBinder) { 176 | 177 | mServerMessenger = new Messenger(iBinder); 178 | ServiceBound=true; 179 | } 180 | }; 181 | 182 | public void SendFaceDataToClients(FloatBuffer FB) 183 | { 184 | if(ServiceBound) 185 | { 186 | Message message = Message.obtain(null, 1,0,0,0); 187 | message.obj = FB; 188 | try { 189 | mServerMessenger.send(message); 190 | } catch (RemoteException e) { 191 | e.printStackTrace(); 192 | } 193 | } 194 | 195 | } 196 | 197 | 198 | public Socket ClientSocket = null; 199 | 200 | @Override 201 | protected void onCreate(Bundle savedInstanceState) { 202 | super.onCreate(savedInstanceState); 203 | setContentView(R.layout.activity_main); 204 | surfaceView = findViewById(R.id.surfaceview); 205 | displayRotationHelper = new DisplayRotationHelper(/*context=*/ this); 206 | 207 | // Set up renderer. 208 | surfaceView.setPreserveEGLContextOnPause(true); 209 | surfaceView.setEGLContextClientVersion(2); 210 | surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending. 
211 | surfaceView.setRenderer(this); 212 | surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); 213 | surfaceView.setWillNotDraw(false); 214 | 215 | e1 = (EditText)findViewById(R.id.IPAdressText) ; 216 | String test = getLocalWifiIpAddress()+" | port : " + String.valueOf(TCP_SERVER_PORT); 217 | e1.setText(test); 218 | installRequested = false; 219 | 220 | TCPServerServiceIntent = new Intent(this, TcpServerService.class); 221 | 222 | bindService(TCPServerServiceIntent,ServerServiceConnection,Context.BIND_AUTO_CREATE); 223 | 224 | startService(TCPServerServiceIntent); 225 | 226 | } 227 | 228 | @Override 229 | protected void onStop() { 230 | if(ServiceBound) 231 | unbindService(ServerServiceConnection); 232 | mServerMessenger=null; 233 | ServiceBound=false; 234 | super.onStop(); 235 | } 236 | 237 | @Override 238 | protected void onDestroy() { 239 | if (session != null) { 240 | // Explicitly close ARCore Session to release native resources. 241 | // Review the API reference for important considerations before calling close() in apps with 242 | // more complicated lifecycle requirements: 243 | // https://developers.google.com/ar/reference/java/arcore/reference/com/google/ar/core/Session#close() 244 | session.close(); 245 | session = null; 246 | } 247 | 248 | super.onDestroy(); 249 | } 250 | 251 | @Override 252 | protected void onResume() { 253 | super.onResume(); 254 | 255 | if (session == null) { 256 | Exception exception = null; 257 | String message = null; 258 | try { 259 | switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) { 260 | case INSTALL_REQUESTED: 261 | installRequested = true; 262 | return; 263 | case INSTALLED: 264 | break; 265 | } 266 | 267 | // ARCore requires camera permissions to operate. If we did not yet obtain runtime 268 | // permission on Android M and above, now is a good time to ask the user for it. 
269 | if (!CameraPermissionHelper.hasCameraPermission(this)) { 270 | CameraPermissionHelper.requestCameraPermission(this); 271 | return; 272 | } 273 | 274 | // Create the session and configure it to use a front-facing (selfie) camera. 275 | session = new Session(/* context= */ this, EnumSet.noneOf(Session.Feature.class)); 276 | CameraConfigFilter cameraConfigFilter = new CameraConfigFilter(session); 277 | cameraConfigFilter.setFacingDirection(CameraConfig.FacingDirection.FRONT); 278 | List cameraConfigs = session.getSupportedCameraConfigs(cameraConfigFilter); 279 | if (!cameraConfigs.isEmpty()) { 280 | // Element 0 contains the camera config that best matches the session feature 281 | // and filter settings. 282 | session.setCameraConfig(cameraConfigs.get(0)); 283 | } else { 284 | message = "This device does not have a front-facing (selfie) camera"; 285 | exception = new UnavailableDeviceNotCompatibleException(message); 286 | } 287 | configureSession(); 288 | 289 | } catch (UnavailableArcoreNotInstalledException 290 | | UnavailableUserDeclinedInstallationException e) { 291 | message = "Please install ARCore"; 292 | exception = e; 293 | } catch (UnavailableApkTooOldException e) { 294 | message = "Please update ARCore"; 295 | exception = e; 296 | } catch (UnavailableSdkTooOldException e) { 297 | message = "Please update this app"; 298 | exception = e; 299 | } catch (UnavailableDeviceNotCompatibleException e) { 300 | message = "This device does not support AR"; 301 | exception = e; 302 | } catch (Exception e) { 303 | message = "Failed to create AR session"; 304 | exception = e; 305 | } 306 | 307 | if (message != null) { 308 | messageSnackbarHelper.showError(this, message); 309 | Log.e(TAG, "Exception creating session", exception); 310 | return; 311 | } 312 | } 313 | 314 | // Note that order matters - see the note in onPause(), the reverse applies here. 
315 | try { 316 | session.resume(); 317 | } catch (CameraNotAvailableException e) { 318 | messageSnackbarHelper.showError(this, "Camera not available. Try restarting the app."); 319 | session = null; 320 | return; 321 | } 322 | 323 | surfaceView.onResume(); 324 | displayRotationHelper.onResume(); 325 | } 326 | 327 | @Override 328 | public void onPause() { 329 | super.onPause(); 330 | if (session != null) { 331 | // Note that the order matters - GLSurfaceView is paused first so that it does not try 332 | // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may 333 | // still call session.update() and get a SessionPausedException. 334 | displayRotationHelper.onPause(); 335 | surfaceView.onPause(); 336 | session.pause(); 337 | } 338 | } 339 | 340 | @Override 341 | public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) { 342 | super.onRequestPermissionsResult(requestCode, permissions, results); 343 | if (!CameraPermissionHelper.hasCameraPermission(this)) { 344 | Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG) 345 | .show(); 346 | if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) { 347 | // Permission denied with checking "Do not ask again". 348 | CameraPermissionHelper.launchPermissionSettings(this); 349 | } 350 | finish(); 351 | } 352 | } 353 | 354 | @Override 355 | public void onWindowFocusChanged(boolean hasFocus) { 356 | super.onWindowFocusChanged(hasFocus); 357 | FullScreenHelper.setFullScreenOnWindowFocusChanged(this, hasFocus); 358 | } 359 | 360 | @Override 361 | public void onSurfaceCreated(GL10 gl, EGLConfig config) { 362 | GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f); 363 | 364 | // Prepare the rendering objects. This involves reading shaders, so may throw an IOException. 365 | try { 366 | // Create the texture and pass it to ARCore session to be filled during update(). 
367 | backgroundRenderer.createOnGlThread(/*context=*/ this); 368 | augmentedFaceRenderer.createOnGlThread(this, "models/freckles.png"); 369 | augmentedFaceRenderer.setMaterialProperties(0.0f, 1.0f, 0.1f, 6.0f); 370 | noseObject.createOnGlThread(/*context=*/ this, "models/nose.obj", "models/nose_fur.png"); 371 | noseObject.setMaterialProperties(0.0f, 1.0f, 0.1f, 6.0f); 372 | noseObject.setBlendMode(ObjectRenderer.BlendMode.AlphaBlending); 373 | rightEarObject.createOnGlThread(this, "models/forehead_right.obj", "models/ear_fur.png"); 374 | rightEarObject.setMaterialProperties(0.0f, 1.0f, 0.1f, 6.0f); 375 | rightEarObject.setBlendMode(ObjectRenderer.BlendMode.AlphaBlending); 376 | leftEarObject.createOnGlThread(this, "models/forehead_left.obj", "models/ear_fur.png"); 377 | leftEarObject.setMaterialProperties(0.0f, 1.0f, 0.1f, 6.0f); 378 | leftEarObject.setBlendMode(ObjectRenderer.BlendMode.AlphaBlending); 379 | 380 | } catch (IOException e) { 381 | Log.e(TAG, "Failed to read an asset file", e); 382 | } 383 | } 384 | 385 | @Override 386 | public void onSurfaceChanged(GL10 gl, int width, int height) { 387 | displayRotationHelper.onSurfaceChanged(width, height); 388 | GLES20.glViewport(0, 0, width, height); 389 | } 390 | 391 | public AtomicBoolean ClientSet1 = new AtomicBoolean(false); 392 | public AtomicBoolean ClientSet2 = new AtomicBoolean(true); 393 | 394 | @Override 395 | public void onDrawFrame(GL10 gl) { 396 | // Clear screen to notify driver it should not load any pixels from previous frame. 397 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); 398 | 399 | if (session == null) { 400 | return; 401 | } 402 | // Notify ARCore session that the view size changed so that the perspective matrix and 403 | // the video background can be properly adjusted. 
404 | displayRotationHelper.updateSessionIfNeeded(session); 405 | 406 | try { 407 | session.setCameraTextureName(backgroundRenderer.getTextureId()); 408 | 409 | // Obtain the current frame from ARSession. When the configuration is set to 410 | // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the 411 | // camera framerate. 412 | Frame frame = session.update(); 413 | Camera camera = frame.getCamera(); 414 | 415 | // Get projection matrix. 416 | float[] projectionMatrix = new float[16]; 417 | camera.getProjectionMatrix(projectionMatrix, 0, 0.1f, 100.0f); 418 | 419 | // Get camera matrix and draw. 420 | float[] viewMatrix = new float[16]; 421 | camera.getViewMatrix(viewMatrix, 0); 422 | 423 | // Compute lighting from average intensity of the image. 424 | // The first three components are color scaling factors. 425 | // The last one is the average pixel intensity in gamma space. 426 | final float[] colorCorrectionRgba = new float[4]; 427 | //frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0); 428 | 429 | // If frame is ready, render camera preview image to the GL surface. 430 | backgroundRenderer.draw(frame); 431 | 432 | // Keep the screen unlocked while tracking, but allow it to lock when tracking stops. 433 | trackingStateHelper.updateKeepScreenOnFlag(camera.getTrackingState()); 434 | 435 | // ARCore's face detection works best on upright faces, relative to gravity. 436 | // If the device cannot determine a screen side aligned with gravity, face 437 | // detection may not work optimally. 
438 | Collection faces = session.getAllTrackables(AugmentedFace.class); 439 | for (AugmentedFace face : faces) { 440 | if (face.getTrackingState() != TrackingState.TRACKING) { 441 | break; 442 | } 443 | 444 | float scaleFactor = 1.0f; 445 | 446 | float[] modelMatrix = new float[16]; 447 | face.getCenterPose().toMatrix(modelMatrix, 0); 448 | 449 | if(ServiceBound) 450 | { 451 | 452 | // Append two bytes array of ARCore face points (468 points of 3 floats of 32 bits | 468*3*4) 453 | // followed by Head pivot Matrix 4x4 floats 454 | // Send them to the TCP server service using a Messenger 455 | 456 | 457 | FloatBuffer VerticesFace = face.getMeshVertices().duplicate(); 458 | 459 | ByteBuffer byteBuffer = ByteBuffer.allocate(468*3*4);//468 vector : 3 x 32bits floats + 16 * 32bits float matrix 460 | byteBuffer.asFloatBuffer().put(VerticesFace); 461 | byte[] bytearray = byteBuffer.array(); 462 | 463 | 464 | 465 | float[] TranslationCamera = face.getCenterPose().getTranslation(); 466 | 467 | float[] modelMatrix_c = modelMatrix.clone(); 468 | modelMatrix_c[3]=TranslationCamera[0]; 469 | modelMatrix_c[7]=TranslationCamera[1]; 470 | modelMatrix_c[11]=TranslationCamera[2]; 471 | 472 | 473 | ByteBuffer byteBufferM = ByteBuffer.allocate(16*4); 474 | byteBufferM.asFloatBuffer().put(modelMatrix_c); 475 | byte[] bytearray2 = byteBufferM.array(); 476 | 477 | ByteArrayOutputStream outputStream = new ByteArrayOutputStream( ); 478 | outputStream.write( bytearray ); 479 | outputStream.write( bytearray2 ); 480 | 481 | byte c[] = outputStream.toByteArray( ); 482 | 483 | 484 | Message message = Message.obtain(); 485 | Bundle bundle = new Bundle(); 486 | 487 | bundle.putByteArray("data",c); 488 | 489 | 490 | message.setData(bundle); 491 | try { 492 | mServerMessenger.send(message); 493 | } catch (RemoteException e) { 494 | e.printStackTrace(); 495 | } 496 | 497 | } 498 | 499 | // Face objects use transparency so they must be rendered back to front without depth write. 
500 | // GLES20.glDepthMask(false); 501 | 502 | // Each face's region poses, mesh vertices, and mesh normals are updated every frame. 503 | 504 | // 1. Render the face mesh first, behind any 3D objects attached to the face regions. 505 | 506 | 507 | augmentedFaceRenderer.draw( 508 | projectionMatrix, viewMatrix, modelMatrix, colorCorrectionRgba, face,ServerSoc); 509 | 510 | /* 511 | // 2. Next, render the 3D objects attached to the forehead. 512 | face.getRegionPose(RegionType.FOREHEAD_RIGHT).toMatrix(rightEarMatrix, 0); 513 | rightEarObject.updateModelMatrix(rightEarMatrix, scaleFactor); 514 | rightEarObject.draw(viewMatrix, projectionMatrix, colorCorrectionRgba, DEFAULT_COLOR); 515 | 516 | face.getRegionPose(RegionType.FOREHEAD_LEFT).toMatrix(leftEarMatrix, 0); 517 | leftEarObject.updateModelMatrix(leftEarMatrix, scaleFactor); 518 | leftEarObject.draw(viewMatrix, projectionMatrix, colorCorrectionRgba, DEFAULT_COLOR); 519 | 520 | // 3. Render the nose last so that it is not occluded by face mesh or by 3D objects attached 521 | // to the forehead regions. 522 | face.getRegionPose(RegionType.NOSE_TIP).toMatrix(noseMatrix, 0); 523 | noseObject.updateModelMatrix(noseMatrix, scaleFactor); 524 | noseObject.draw(viewMatrix, projectionMatrix, colorCorrectionRgba, DEFAULT_COLOR);*/ 525 | } 526 | } catch (Throwable t) { 527 | // Avoid crashing the application due to unhandled exceptions. 
528 | Log.e(TAG, "Exception on the OpenGL thread", t); 529 | } finally { 530 | GLES20.glDepthMask(true); 531 | } 532 | } 533 | 534 | private void configureSession() { 535 | Config config = new Config(session); 536 | config.setAugmentedFaceMode(AugmentedFaceMode.MESH3D); 537 | session.configure(config); 538 | } 539 | } 540 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/augmentedfaces/MessageSender.java: -------------------------------------------------------------------------------- 1 | package com.google.ar.core.examples.java.augmentedfaces; 2 | 3 | import android.os.AsyncTask; 4 | import android.app.Service; 5 | import android.os.Process; 6 | import java.io.DataOutputStream; 7 | import java.io.IOException; 8 | import java.io.PrintWriter; 9 | import java.net.ServerSocket; 10 | import java.net.Socket; 11 | import java.nio.FloatBuffer; 12 | import java.nio.ByteBuffer; 13 | import java.util.Arrays; 14 | 15 | import static android.os.Process.THREAD_PRIORITY_BACKGROUND; 16 | import static android.os.Process.THREAD_PRIORITY_MORE_FAVORABLE; 17 | import static android.os.Process.THREAD_PRIORITY_DEFAULT; 18 | import static android.os.Process.THREAD_PRIORITY_DISPLAY; 19 | 20 | public class MessageSender extends AsyncTask 21 | { 22 | private static final int TCP_SERVER_PORT = 9886; 23 | Socket S; 24 | DataOutputStream dos; 25 | PrintWriter pw; 26 | 27 | @Override 28 | protected Void doInBackground(ServerMessage... 
FB) 29 | { 30 | Process.setThreadPriority(THREAD_PRIORITY_DISPLAY /*+ THREAD_PRIORITY_MORE_FAVORABLE*/); 31 | AugmentedFacesActivity ClientHolder = FB[0].ClientIDHolder; 32 | 33 | // if(ClientHolder.ServerSoc.accept()==null) 34 | // ClientHolder.ClientSocket=null; 35 | 36 | if(ClientHolder.ClientSocket==null) { 37 | try { 38 | ClientHolder.ClientSocket = ClientHolder.ServerSoc.accept(); 39 | } catch (IOException e) { 40 | e.printStackTrace(); 41 | } 42 | } 43 | 44 | if(ClientHolder.ClientSocket!=null) 45 | { 46 | 47 | ByteBuffer byteBuffer = ByteBuffer.allocate(468*3* 4);//468 vector : 3 x 32bits floats 48 | byteBuffer.asFloatBuffer().put(FB[0].FB); 49 | 50 | byte[] bytearray = byteBuffer.array(); 51 | 52 | // ByteBuffer.allocate(468*3*4).asFloatBuffer(FB[0].FB); 53 | /* 54 | byte[] bytearray = new byte[10]; 55 | bytearray[0]=0; 56 | bytearray[8]=1; 57 | */ 58 | 59 | try { 60 | ClientHolder.ClientSocket.getOutputStream().write(bytearray); 61 | } catch (IOException e) { 62 | e.printStackTrace(); 63 | } 64 | } 65 | 66 | 67 | // ClientHolder.ClientSet2.set(true); 68 | 69 | 70 | /* 71 | FloatBuffer FloatB = FB[0].FB.duplicate(); 72 | ServerSocket ss = FB[0].Server; 73 | //FloatB.limit(468*3);//for whatever reason the buffer 74 | 75 | ByteBuffer byteBuffer = ByteBuffer.allocate(468*3* 4);//468 vector : 3 x 32bits floats 76 | byteBuffer.asFloatBuffer().put(FloatB); 77 | 78 | byte[] bytearray = byteBuffer.array(); 79 | */ 80 | /* 81 | try{ 82 | //ServerSocket ss = new ServerSocket(TCP_SERVER_PORT); 83 | 84 | //ss.setSoTimeout(10000); 85 | //accept connections 86 | if(FB[0].Client != null) 87 | { 88 | //S = ss.accept(); 89 | FB[0].Client.getOutputStream().write(bytearray); 90 | //S.close(); 91 | } 92 | //ss.close(); 93 | //S = ss.accept(); 94 | 95 | // S= new Socket("192.168.1.10",7800); 96 | 97 | 98 | 99 | //pw = new PrintWriter(S.getOutputStream()); 100 | // pw.write 101 | }catch(IOException e) 102 | { 103 | e.printStackTrace(); 104 | }*/ 105 | 106 | 107 | 108 | return 
null; 109 | } 110 | } 111 | 112 | 113 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/augmentedfaces/ServerMessage.java: -------------------------------------------------------------------------------- 1 | package com.google.ar.core.examples.java.augmentedfaces; 2 | 3 | import java.net.ServerSocket; 4 | import java.net.Socket; 5 | import java.nio.FloatBuffer; 6 | 7 | public class ServerMessage { 8 | FloatBuffer FB; 9 | ServerSocket Server; 10 | AugmentedFacesActivity ClientIDHolder; 11 | 12 | public ServerMessage() 13 | { 14 | 15 | } 16 | public ServerMessage(AugmentedFacesActivity Client_) 17 | { 18 | this.ClientIDHolder=Client_; 19 | } 20 | 21 | public ServerMessage(FloatBuffer FB_, ServerSocket Server_, AugmentedFacesActivity Client_) 22 | { 23 | this.FB=FB_; 24 | this.Server=Server_; 25 | this.ClientIDHolder=Client_; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/augmentedfaces/TcpClientHandler.java: -------------------------------------------------------------------------------- 1 | package com.google.ar.core.examples.java.augmentedfaces; 2 | 3 | import android.os.Handler; 4 | import android.os.Message; 5 | 6 | import java.io.DataInputStream; 7 | import java.io.DataOutputStream; 8 | import java.io.IOException; 9 | import java.io.InputStream; 10 | import java.io.OutputStream; 11 | import java.net.Socket; 12 | import java.nio.ByteBuffer; 13 | import java.nio.FloatBuffer; 14 | import java.util.StringTokenizer; 15 | import java.util.concurrent.atomic.AtomicBoolean; 16 | 17 | public class TcpClientHandler extends Thread{ 18 | 19 | public Handler mHandler ;//= new Handler(); 20 | 21 | FloatBuffer DataToSend; 22 | private boolean DataToProcessed=false; 23 | private AtomicBoolean UpToDate = new AtomicBoolean(false); 24 | public AtomicBoolean ConnectionRelevant = new 
AtomicBoolean(true); 25 | private TcpServerService Owner; 26 | 27 | private String name; 28 | final DataInputStream InStream; 29 | final DataOutputStream OStream; 30 | Socket S; 31 | boolean isloggedin; 32 | 33 | 34 | 35 | 36 | 37 | TcpClientHandler(Socket S, String name, DataInputStream InStream_, DataOutputStream OStream_) 38 | { 39 | this.InStream = InStream_; 40 | this.OStream = OStream_; 41 | this.name = name; 42 | this.S = S; 43 | this.isloggedin=true; 44 | 45 | } 46 | 47 | @Override 48 | public void run() { 49 | super.run(); 50 | 51 | String received; 52 | 53 | while (ConnectionRelevant.get()) { 54 | 55 | 56 | int Id = 0; 57 | for (TcpClientHandler mc : TcpServerService.ar) 58 | { 59 | if ( mc.isloggedin==true && mc.name.equals(name)) 60 | { 61 | break; 62 | /* 63 | try { 64 | mc.OStream.writeUTF(this.name+" : "+TcpServerService.DataString); 65 | } catch (IOException e) { 66 | e.printStackTrace(); 67 | } 68 | break; 69 | */ 70 | } 71 | Id+=1; 72 | } 73 | 74 | if(!TcpServerService.ar_DataSent.get(Id).get()) 75 | { 76 | try { 77 | OStream.write(TcpServerService.DataBytes); 78 | } catch (IOException e) { 79 | e.printStackTrace(); 80 | ConnectionRelevant.set(false); 81 | break; 82 | } 83 | TcpServerService.ar_DataSent.get(Id).set(true); 84 | } 85 | 86 | } 87 | 88 | try { 89 | this.InStream.close(); 90 | } catch (IOException e) { 91 | e.printStackTrace(); 92 | } 93 | try { 94 | this.OStream.close(); 95 | } catch (IOException e) { 96 | e.printStackTrace(); 97 | } 98 | 99 | return; 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/augmentedfaces/TcpServerService.java: -------------------------------------------------------------------------------- 1 | package com.google.ar.core.examples.java.augmentedfaces; 2 | 3 | import android.app.Service; 4 | import android.content.Intent; 5 | import android.os.Bundle; 6 | import android.os.Handler; 7 | import 
android.os.IBinder; 8 | import android.os.Message; 9 | import android.os.Messenger; 10 | 11 | import androidx.annotation.NonNull; 12 | import androidx.annotation.Nullable; 13 | 14 | import java.io.DataInputStream; 15 | import java.io.DataOutputStream; 16 | import java.io.IOException; 17 | import java.io.InputStream; 18 | import java.io.OutputStream; 19 | import java.net.ServerSocket; 20 | import java.net.Socket; 21 | import java.nio.ByteBuffer; 22 | import java.nio.FloatBuffer; 23 | import java.util.Vector; 24 | import java.util.concurrent.atomic.AtomicBoolean; 25 | 26 | public class TcpServerService extends Service { 27 | 28 | static String DataString = ""; 29 | 30 | static byte[] DataBytes; 31 | 32 | static FloatBuffer DataToSend; 33 | 34 | // Vector to store active clients 35 | static Vector ar = new Vector<>(); 36 | static Vector ar_DataSent = new Vector<>(); 37 | static Vector ar_Socket = new Vector<>(); 38 | 39 | // counter for clients 40 | static int j = 0; 41 | 42 | boolean ClientDataUptoDate[]= new boolean[5]; 43 | Socket ClientSockets[]= new Socket[5]; 44 | TcpClientHandler ClientsThreads[]= new TcpClientHandler[5]; 45 | 46 | 47 | private static final int TCP_SERVER_PORT = 9886; 48 | private ServerSocket SSocket = null; 49 | private AtomicBoolean Working = new AtomicBoolean(true); 50 | private Runnable Runn = new Runnable() { 51 | @Override 52 | public void run() { 53 | 54 | try 55 | { 56 | SSocket = new ServerSocket(TCP_SERVER_PORT); 57 | Socket s; 58 | while(true) 59 | { 60 | s = SSocket.accept(); 61 | 62 | System.out.println("New client request received : " + s); 63 | 64 | // obtain input and output streams 65 | // InputStream INtream = s.getInputStream(); 66 | // OutputStream OStream = s.getOutputStream(); 67 | 68 | DataInputStream dis = new DataInputStream(s.getInputStream()); 69 | DataOutputStream dos = new DataOutputStream(s.getOutputStream()); 70 | 71 | System.out.println("Creating a new handler for this client..."); 72 | 73 | // Create a new 
handler object for handling this request. 74 | TcpClientHandler mtch = new TcpClientHandler(s,"client " + j, dis, dos); 75 | 76 | // Create a new Thread with this object. 77 | Thread t = new Thread(mtch); 78 | 79 | System.out.println("Adding this client to active client list"); 80 | 81 | // add this client to active clients list 82 | ar.add(mtch); 83 | 84 | AtomicBoolean SentLatestData = new AtomicBoolean(true); 85 | ar_DataSent.add(SentLatestData); 86 | 87 | ar_Socket.add(s); 88 | 89 | // start the thread. 90 | t.start(); 91 | 92 | // increment i for new client. 93 | // i is used for naming only, and can be replaced 94 | // by any naming scheme 95 | j++; 96 | } 97 | 98 | } 99 | catch(IOException e) 100 | { 101 | 102 | } 103 | /* 104 | for(int i = 0;i<5; i+=1) 105 | { 106 | ClientDataUptoDate[i]=false; 107 | ClientSockets[i]=null; 108 | ClientsThreads[i]=null; 109 | 110 | } 111 | 112 | try{ 113 | SSocket = new ServerSocket(TCP_SERVER_PORT); 114 | while (Working.get()) { 115 | if (SSocket != null) { 116 | 117 | for(int i=0;i<5;i+=1) 118 | { 119 | 120 | 121 | if(ClientSockets[i]==null) 122 | { 123 | ClientSockets[i] = SSocket.accept(); 124 | InputStream INtream = ClientSockets[i].getInputStream(); 125 | OutputStream OStream = ClientSockets[i].getOutputStream(); 126 | ClientsThreads[i] = new TcpClientHandler(INtream, OStream); 127 | Thread t = ClientsThreads[i]; 128 | t.start(); 129 | } 130 | else 131 | { 132 | 133 | 134 | //TODO Heartbeat function to monitor client disconnections , stop appropriate client thread, and close client Socket, setting them both back to null 135 | } 136 | } 137 | 138 | 139 | 140 | 141 | 142 | } else { 143 | // Log.e(TAG, "Couldn't create ServerSocket!") 144 | } 145 | } 146 | } 147 | catch(IOException e) 148 | { 149 | e.printStackTrace(); 150 | try { 151 | for(int i=0;i<5;i+=1) 152 | { 153 | if(ClientSockets[i]!=null) 154 | ClientSockets[i].close(); 155 | } 156 | 157 | } catch (IOException ex) { 158 | ex.printStackTrace(); 159 | } 160 | }*/ 
161 | } 162 | }; 163 | 164 | @Nullable 165 | @Override 166 | public IBinder onBind(Intent intent) { 167 | return mMessenger.getBinder(); 168 | } 169 | 170 | @Override 171 | public void onCreate() { 172 | super.onCreate(); 173 | new Thread(Runn).start(); 174 | } 175 | 176 | @Override 177 | public void onDestroy() { 178 | super.onDestroy(); 179 | Working.set(false); 180 | } 181 | 182 | Messenger mMessenger = new Messenger(new IncomingHandler()); 183 | 184 | class IncomingHandler extends Handler 185 | { 186 | @Override 187 | public void handleMessage(Message msg) { 188 | 189 | 190 | for(int i = ar.size()-1;i>=0;i = i-1) 191 | { 192 | if(!ar.get(i).ConnectionRelevant.get()) 193 | { 194 | try { 195 | ar_Socket.get(i).close(); 196 | } catch (IOException e) { 197 | e.printStackTrace(); 198 | } 199 | 200 | ar_Socket.remove(i); 201 | ar.remove(i); 202 | ar_DataSent.remove(i); 203 | 204 | System.out.println("removing closed socket from client list"); 205 | } 206 | 207 | } 208 | 209 | 210 | 211 | 212 | // the idea here is that per client threads are running at a much higher frequency than we have updates 213 | // ideally we should verify that every client has already sent DataBytes and isn't currently reading it to avoid any race condition 214 | // therefore here we assume that all threads are up to date and aren't reading DataBytes 215 | Bundle ReceivedData = msg.getData(); 216 | DataBytes = ReceivedData.getByteArray("data"); 217 | 218 | for(AtomicBoolean bool :ar_DataSent) 219 | { 220 | bool.set(false); 221 | } 222 | 223 | super.handleMessage(msg); 224 | } 225 | } 226 | 227 | } 228 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/CameraPermissionHelper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use 
this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.Manifest; 19 | import android.app.Activity; 20 | import android.content.Intent; 21 | import android.content.pm.PackageManager; 22 | import android.net.Uri; 23 | import android.provider.Settings; 24 | import androidx.core.app.ActivityCompat; 25 | import androidx.core.content.ContextCompat; 26 | 27 | /** Helper to ask camera permission. */ 28 | public final class CameraPermissionHelper { 29 | private static final int CAMERA_PERMISSION_CODE = 0; 30 | private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA; 31 | 32 | /** Check to see we have the necessary permissions for this app. */ 33 | public static boolean hasCameraPermission(Activity activity) { 34 | return ContextCompat.checkSelfPermission(activity, CAMERA_PERMISSION) 35 | == PackageManager.PERMISSION_GRANTED; 36 | } 37 | 38 | /** Check to see we have the necessary permissions for this app, and ask for them if we don't. */ 39 | public static void requestCameraPermission(Activity activity) { 40 | ActivityCompat.requestPermissions( 41 | activity, new String[] {CAMERA_PERMISSION}, CAMERA_PERMISSION_CODE); 42 | } 43 | 44 | /** Check to see if we need to show the rationale for this permission. 
*/ 45 | public static boolean shouldShowRequestPermissionRationale(Activity activity) { 46 | return ActivityCompat.shouldShowRequestPermissionRationale(activity, CAMERA_PERMISSION); 47 | } 48 | 49 | /** Launch Application Setting to grant permission. */ 50 | public static void launchPermissionSettings(Activity activity) { 51 | Intent intent = new Intent(); 52 | intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS); 53 | intent.setData(Uri.fromParts("package", activity.getPackageName(), null)); 54 | activity.startActivity(intent); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/DepthSettings.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.content.Context; 19 | import android.content.SharedPreferences; 20 | 21 | /** Manages the Occlusion option setting and shared preferences. 
*/ 22 | public class DepthSettings { 23 | public static final String SHARED_PREFERENCES_ID = "SHARED_PREFERENCES_OCCLUSION_OPTIONS"; 24 | public static final String SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE = 25 | "show_depth_enable_dialog_oobe"; 26 | public static final String SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION = "use_depth_for_occlusion"; 27 | 28 | // Current depth-based settings used by the app. 29 | private boolean depthColorVisualizationEnabled = false; 30 | private boolean useDepthForOcclusion = false; 31 | private SharedPreferences sharedPreferences; 32 | 33 | /** Initializes the current settings based on when the app was last used. */ 34 | public void onCreate(Context context) { 35 | sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_ID, Context.MODE_PRIVATE); 36 | useDepthForOcclusion = 37 | sharedPreferences.getBoolean(SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION, false); 38 | } 39 | 40 | /** Retrieves whether depth-based occlusion is enabled. */ 41 | public boolean useDepthForOcclusion() { 42 | return useDepthForOcclusion; 43 | } 44 | 45 | public void setUseDepthForOcclusion(boolean enable) { 46 | if (enable == useDepthForOcclusion) { 47 | return; // No change. 48 | } 49 | 50 | // Updates the stored default settings. 51 | useDepthForOcclusion = enable; 52 | SharedPreferences.Editor editor = sharedPreferences.edit(); 53 | editor.putBoolean(SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION, useDepthForOcclusion); 54 | editor.apply(); 55 | } 56 | 57 | /** Retrieves whether to render the depth map visualization instead of the camera feed. */ 58 | public boolean depthColorVisualizationEnabled() { 59 | return depthColorVisualizationEnabled; 60 | } 61 | 62 | public void setDepthColorVisualizationEnabled(boolean depthColorVisualizationEnabled) { 63 | this.depthColorVisualizationEnabled = depthColorVisualizationEnabled; 64 | } 65 | 66 | /** Determines if the initial prompt to use depth-based occlusion should be shown. 
*/ 67 | public boolean shouldShowDepthEnableDialog() { 68 | // Checks if this dialog has been called before on this device. 69 | boolean showDialog = 70 | sharedPreferences.getBoolean(SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE, true); 71 | 72 | if (showDialog) { 73 | // Only ever shows the dialog on the first time. If the user wants to adjust these settings 74 | // again, they can use the gear icon to invoke the settings menu dialog. 75 | SharedPreferences.Editor editor = sharedPreferences.edit(); 76 | editor.putBoolean(SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE, false); 77 | editor.apply(); 78 | } 79 | 80 | return showDialog; 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/DisplayRotationHelper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.app.Activity; 19 | import android.content.Context; 20 | import android.hardware.camera2.CameraAccessException; 21 | import android.hardware.camera2.CameraCharacteristics; 22 | import android.hardware.camera2.CameraManager; 23 | import android.hardware.display.DisplayManager; 24 | import android.hardware.display.DisplayManager.DisplayListener; 25 | import android.view.Display; 26 | import android.view.Surface; 27 | import android.view.WindowManager; 28 | import com.google.ar.core.Session; 29 | 30 | /** 31 | * Helper to track the display rotations. In particular, the 180 degree rotations are not notified 32 | * by the onSurfaceChanged() callback, and thus they require listening to the android display 33 | * events. 34 | */ 35 | public final class DisplayRotationHelper implements DisplayListener { 36 | private boolean viewportChanged; 37 | private int viewportWidth; 38 | private int viewportHeight; 39 | private final Display display; 40 | private final DisplayManager displayManager; 41 | private final CameraManager cameraManager; 42 | 43 | /** 44 | * Constructs the DisplayRotationHelper but does not register the listener yet. 45 | * 46 | * @param context the Android {@link Context}. 47 | */ 48 | public DisplayRotationHelper(Context context) { 49 | displayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE); 50 | cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); 51 | WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); 52 | display = windowManager.getDefaultDisplay(); 53 | } 54 | 55 | /** Registers the display listener. Should be called from {@link Activity#onResume()}. */ 56 | public void onResume() { 57 | displayManager.registerDisplayListener(this, null); 58 | } 59 | 60 | /** Unregisters the display listener. Should be called from {@link Activity#onPause()}. 
*/ 61 | public void onPause() { 62 | displayManager.unregisterDisplayListener(this); 63 | } 64 | 65 | /** 66 | * Records a change in surface dimensions. This will be later used by {@link 67 | * #updateSessionIfNeeded(Session)}. Should be called from {@link 68 | * android.opengl.GLSurfaceView.Renderer 69 | * #onSurfaceChanged(javax.microedition.khronos.opengles.GL10, int, int)}. 70 | * 71 | * @param width the updated width of the surface. 72 | * @param height the updated height of the surface. 73 | */ 74 | public void onSurfaceChanged(int width, int height) { 75 | viewportWidth = width; 76 | viewportHeight = height; 77 | viewportChanged = true; 78 | } 79 | 80 | /** 81 | * Updates the session display geometry if a change was posted either by {@link 82 | * #onSurfaceChanged(int, int)} call or by {@link #onDisplayChanged(int)} system callback. This 83 | * function should be called explicitly before each call to {@link Session#update()}. This 84 | * function will also clear the 'pending update' (viewportChanged) flag. 85 | * 86 | * @param session the {@link Session} object to update if display geometry changed. 87 | */ 88 | public void updateSessionIfNeeded(Session session) { 89 | if (viewportChanged) { 90 | int displayRotation = display.getRotation(); 91 | session.setDisplayGeometry(displayRotation, viewportWidth, viewportHeight); 92 | viewportChanged = false; 93 | } 94 | } 95 | 96 | /** 97 | * Returns the aspect ratio of the GL surface viewport while accounting for the display rotation 98 | * relative to the device camera sensor orientation. 
99 | */ 100 | public float getCameraSensorRelativeViewportAspectRatio(String cameraId) { 101 | float aspectRatio; 102 | int cameraSensorToDisplayRotation = getCameraSensorToDisplayRotation(cameraId); 103 | switch (cameraSensorToDisplayRotation) { 104 | case 90: 105 | case 270: 106 | aspectRatio = (float) viewportHeight / (float) viewportWidth; 107 | break; 108 | case 0: 109 | case 180: 110 | aspectRatio = (float) viewportWidth / (float) viewportHeight; 111 | break; 112 | default: 113 | throw new RuntimeException("Unhandled rotation: " + cameraSensorToDisplayRotation); 114 | } 115 | return aspectRatio; 116 | } 117 | 118 | /** 119 | * Returns the rotation of the back-facing camera with respect to the display. The value is one of 120 | * 0, 90, 180, 270. 121 | */ 122 | public int getCameraSensorToDisplayRotation(String cameraId) { 123 | CameraCharacteristics characteristics; 124 | try { 125 | characteristics = cameraManager.getCameraCharacteristics(cameraId); 126 | } catch (CameraAccessException e) { 127 | throw new RuntimeException("Unable to determine display orientation", e); 128 | } 129 | 130 | // Camera sensor orientation. 131 | int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); 132 | 133 | // Current display orientation. 134 | int displayOrientation = toDegrees(display.getRotation()); 135 | 136 | // Make sure we return 0, 90, 180, or 270 degrees. 
137 | return (sensorOrientation - displayOrientation + 360) % 360; 138 | } 139 | 140 | private int toDegrees(int rotation) { 141 | switch (rotation) { 142 | case Surface.ROTATION_0: 143 | return 0; 144 | case Surface.ROTATION_90: 145 | return 90; 146 | case Surface.ROTATION_180: 147 | return 180; 148 | case Surface.ROTATION_270: 149 | return 270; 150 | default: 151 | throw new RuntimeException("Unknown rotation " + rotation); 152 | } 153 | } 154 | 155 | @Override 156 | public void onDisplayAdded(int displayId) {} 157 | 158 | @Override 159 | public void onDisplayRemoved(int displayId) {} 160 | 161 | @Override 162 | public void onDisplayChanged(int displayId) { 163 | viewportChanged = true; 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/FullScreenHelper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.app.Activity; 19 | import android.view.View; 20 | 21 | /** Helper to set up the Android full screen mode. */ 22 | public final class FullScreenHelper { 23 | /** 24 | * Sets the Android fullscreen flags. Expected to be called from {@link 25 | * Activity#onWindowFocusChanged(boolean hasFocus)}. 
26 | * 27 | * @param activity the Activity on which the full screen mode will be set. 28 | * @param hasFocus the hasFocus flag passed from the {@link Activity#onWindowFocusChanged(boolean 29 | * hasFocus)} callback. 30 | */ 31 | public static void setFullScreenOnWindowFocusChanged(Activity activity, boolean hasFocus) { 32 | if (hasFocus) { 33 | // https://developer.android.com/training/system-ui/immersive.html#sticky 34 | activity 35 | .getWindow() 36 | .getDecorView() 37 | .setSystemUiVisibility( 38 | View.SYSTEM_UI_FLAG_LAYOUT_STABLE 39 | | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION 40 | | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN 41 | | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION 42 | | View.SYSTEM_UI_FLAG_FULLSCREEN 43 | | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY); 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/InstantPlacementSettings.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.content.Context; 19 | import android.content.SharedPreferences; 20 | 21 | /** Manages the Instant Placement option setting and shared preferences. 
*/ 22 | public class InstantPlacementSettings { 23 | public static final String SHARED_PREFERENCES_ID = "SHARED_PREFERENCES_INSTANT_PLACEMENT_OPTIONS"; 24 | public static final String SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED = 25 | "instant_placement_enabled"; 26 | private boolean instantPlacementEnabled = true; 27 | private SharedPreferences sharedPreferences; 28 | 29 | /** Initializes the current settings based on the saved value. */ 30 | public void onCreate(Context context) { 31 | sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_ID, Context.MODE_PRIVATE); 32 | instantPlacementEnabled = 33 | sharedPreferences.getBoolean(SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED, false); 34 | } 35 | 36 | /** Retrieves whether Instant Placement is enabled, */ 37 | public boolean isInstantPlacementEnabled() { 38 | return instantPlacementEnabled; 39 | } 40 | 41 | public void setInstantPlacementEnabled(boolean enable) { 42 | if (enable == instantPlacementEnabled) { 43 | return; // No change. 44 | } 45 | 46 | // Updates the stored default settings. 47 | instantPlacementEnabled = enable; 48 | SharedPreferences.Editor editor = sharedPreferences.edit(); 49 | editor.putBoolean(SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED, instantPlacementEnabled); 50 | editor.apply(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/SnackbarHelper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.app.Activity; 19 | import android.view.View; 20 | import android.widget.TextView; 21 | import com.google.android.material.snackbar.BaseTransientBottomBar; 22 | import com.google.android.material.snackbar.Snackbar; 23 | 24 | /** 25 | * Helper to manage the sample snackbar. Hides the Android boilerplate code, and exposes simpler 26 | * methods. 27 | */ 28 | public final class SnackbarHelper { 29 | private static final int BACKGROUND_COLOR = 0xbf323232; 30 | private Snackbar messageSnackbar; 31 | private enum DismissBehavior { HIDE, SHOW, FINISH }; 32 | private int maxLines = 2; 33 | private String lastMessage = ""; 34 | private View snackbarView; 35 | 36 | public boolean isShowing() { 37 | return messageSnackbar != null; 38 | } 39 | 40 | /** Shows a snackbar with a given message. */ 41 | public void showMessage(Activity activity, String message) { 42 | if (!message.isEmpty() && (!isShowing() || !lastMessage.equals(message))) { 43 | lastMessage = message; 44 | show(activity, message, DismissBehavior.HIDE); 45 | } 46 | } 47 | 48 | /** Shows a snackbar with a given message, and a dismiss button. */ 49 | public void showMessageWithDismiss(Activity activity, String message) { 50 | show(activity, message, DismissBehavior.SHOW); 51 | } 52 | 53 | /** 54 | * Shows a snackbar with a given error message. When dismissed, will finish the activity. Useful 55 | * for notifying errors, where no further interaction with the activity is possible. 
56 | */ 57 | public void showError(Activity activity, String errorMessage) { 58 | show(activity, errorMessage, DismissBehavior.FINISH); 59 | } 60 | 61 | /** 62 | * Hides the currently showing snackbar, if there is one. Safe to call from any thread. Safe to 63 | * call even if snackbar is not shown. 64 | */ 65 | public void hide(Activity activity) { 66 | if (!isShowing()) { 67 | return; 68 | } 69 | lastMessage = ""; 70 | Snackbar messageSnackbarToHide = messageSnackbar; 71 | messageSnackbar = null; 72 | activity.runOnUiThread( 73 | new Runnable() { 74 | @Override 75 | public void run() { 76 | messageSnackbarToHide.dismiss(); 77 | } 78 | }); 79 | } 80 | 81 | public void setMaxLines(int lines) { 82 | maxLines = lines; 83 | } 84 | 85 | /** 86 | * Sets the view that will be used to find a suitable parent view to hold the Snackbar view. 87 | * 88 | *

To use the root layout ({@link android.R.id.content}), pass in {@code null}. 89 | * 90 | * @param snackbarView the view to pass to {@link 91 | * com.google.android.material.snackbar.Snackbar#make(…)} which will be used to find a 92 | * suitable parent, which is a {@link androidx.coordinatorlayout.widget.CoordinatorLayout}, or 93 | * the window decor's content view, whichever comes first. 94 | */ 95 | public void setParentView(View snackbarView) { 96 | this.snackbarView = snackbarView; 97 | } 98 | 99 | private void show( 100 | final Activity activity, final String message, final DismissBehavior dismissBehavior) { 101 | activity.runOnUiThread( 102 | new Runnable() { 103 | @Override 104 | public void run() { 105 | messageSnackbar = 106 | Snackbar.make( 107 | snackbarView == null 108 | ? activity.findViewById(android.R.id.content) 109 | : snackbarView, 110 | message, 111 | Snackbar.LENGTH_INDEFINITE); 112 | messageSnackbar.getView().setBackgroundColor(BACKGROUND_COLOR); 113 | if (dismissBehavior != DismissBehavior.HIDE) { 114 | messageSnackbar.setAction( 115 | "Dismiss", 116 | new View.OnClickListener() { 117 | @Override 118 | public void onClick(View v) { 119 | messageSnackbar.dismiss(); 120 | } 121 | }); 122 | if (dismissBehavior == DismissBehavior.FINISH) { 123 | messageSnackbar.addCallback( 124 | new BaseTransientBottomBar.BaseCallback() { 125 | @Override 126 | public void onDismissed(Snackbar transientBottomBar, int event) { 127 | super.onDismissed(transientBottomBar, event); 128 | activity.finish(); 129 | } 130 | }); 131 | } 132 | } 133 | ((TextView) 134 | messageSnackbar 135 | .getView() 136 | .findViewById(com.google.android.material.R.id.snackbar_text)) 137 | .setMaxLines(maxLines); 138 | messageSnackbar.show(); 139 | } 140 | }); 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/TapHelper.java: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.content.Context; 19 | import android.view.GestureDetector; 20 | import android.view.MotionEvent; 21 | import android.view.View; 22 | import android.view.View.OnTouchListener; 23 | import java.util.concurrent.ArrayBlockingQueue; 24 | import java.util.concurrent.BlockingQueue; 25 | 26 | /** 27 | * Helper to detect taps using Android GestureDetector, and pass the taps between UI thread and 28 | * render thread. 29 | */ 30 | public final class TapHelper implements OnTouchListener { 31 | private final GestureDetector gestureDetector; 32 | private final BlockingQueue queuedSingleTaps = new ArrayBlockingQueue<>(16); 33 | 34 | /** 35 | * Creates the tap helper. 36 | * 37 | * @param context the application's context. 38 | */ 39 | public TapHelper(Context context) { 40 | gestureDetector = 41 | new GestureDetector( 42 | context, 43 | new GestureDetector.SimpleOnGestureListener() { 44 | @Override 45 | public boolean onSingleTapUp(MotionEvent e) { 46 | // Queue tap if there is space. Tap is lost if queue is full. 
47 | queuedSingleTaps.offer(e); 48 | return true; 49 | } 50 | 51 | @Override 52 | public boolean onDown(MotionEvent e) { 53 | return true; 54 | } 55 | }); 56 | } 57 | 58 | /** 59 | * Polls for a tap. 60 | * 61 | * @return if a tap was queued, a MotionEvent for the tap. Otherwise null if no taps are queued. 62 | */ 63 | public MotionEvent poll() { 64 | return queuedSingleTaps.poll(); 65 | } 66 | 67 | @Override 68 | public boolean onTouch(View view, MotionEvent motionEvent) { 69 | return gestureDetector.onTouchEvent(motionEvent); 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/helpers/TrackingStateHelper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.helpers; 17 | 18 | import android.app.Activity; 19 | import android.view.WindowManager; 20 | import com.google.ar.core.Camera; 21 | import com.google.ar.core.TrackingFailureReason; 22 | import com.google.ar.core.TrackingState; 23 | 24 | /** Gets human readibly tracking failure reasons and suggested actions. */ 25 | public final class TrackingStateHelper { 26 | private static final String INSUFFICIENT_FEATURES_MESSAGE = 27 | "Can't find anything. 
Aim device at a surface with more texture or color."; 28 | private static final String EXCESSIVE_MOTION_MESSAGE = "Moving too fast. Slow down."; 29 | private static final String INSUFFICIENT_LIGHT_MESSAGE = 30 | "Too dark. Try moving to a well-lit area."; 31 | private static final String BAD_STATE_MESSAGE = 32 | "Tracking lost due to bad internal state. Please try restarting the AR experience."; 33 | private static final String CAMERA_UNAVAILABLE_MESSAGE = 34 | "Another app is using the camera. Tap on this app or try closing the other one."; 35 | 36 | private final Activity activity; 37 | 38 | private TrackingState previousTrackingState; 39 | 40 | public TrackingStateHelper(Activity activity) { 41 | this.activity = activity; 42 | } 43 | 44 | /** Keep the screen unlocked while tracking, but allow it to lock when tracking stops. */ 45 | public void updateKeepScreenOnFlag(TrackingState trackingState) { 46 | if (trackingState == previousTrackingState) { 47 | return; 48 | } 49 | 50 | previousTrackingState = trackingState; 51 | switch (trackingState) { 52 | case PAUSED: 53 | case STOPPED: 54 | activity.runOnUiThread( 55 | () -> activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)); 56 | break; 57 | case TRACKING: 58 | activity.runOnUiThread( 59 | () -> activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)); 60 | break; 61 | } 62 | } 63 | 64 | public static String getTrackingFailureReasonString(Camera camera) { 65 | TrackingFailureReason reason = camera.getTrackingFailureReason(); 66 | switch (reason) { 67 | case NONE: 68 | return ""; 69 | case BAD_STATE: 70 | return BAD_STATE_MESSAGE; 71 | case INSUFFICIENT_LIGHT: 72 | return INSUFFICIENT_LIGHT_MESSAGE; 73 | case EXCESSIVE_MOTION: 74 | return EXCESSIVE_MOTION_MESSAGE; 75 | case INSUFFICIENT_FEATURES: 76 | return INSUFFICIENT_FEATURES_MESSAGE; 77 | case CAMERA_UNAVAILABLE: 78 | return CAMERA_UNAVAILABLE_MESSAGE; 79 | } 80 | return "Unknown tracking failure reason: " + 
reason; 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.rendering; 17 | 18 | import android.content.Context; 19 | import android.opengl.GLES11Ext; 20 | import android.opengl.GLES20; 21 | import android.opengl.GLSurfaceView; 22 | import androidx.annotation.NonNull; 23 | import com.google.ar.core.Coordinates2d; 24 | import com.google.ar.core.Frame; 25 | import java.io.IOException; 26 | import java.nio.ByteBuffer; 27 | import java.nio.ByteOrder; 28 | import java.nio.FloatBuffer; 29 | import javax.microedition.khronos.egl.EGLConfig; 30 | import javax.microedition.khronos.opengles.GL10; 31 | 32 | /** 33 | * This class renders the AR background from camera feed. It creates and hosts the texture given to 34 | * ARCore to be filled with the camera image. 35 | */ 36 | public class BackgroundRenderer { 37 | private static final String TAG = BackgroundRenderer.class.getSimpleName(); 38 | 39 | // Shader names. 
40 | private static final String CAMERA_VERTEX_SHADER_NAME = "shaders/screenquad.vert"; 41 | private static final String CAMERA_FRAGMENT_SHADER_NAME = "shaders/screenquad.frag"; 42 | private static final String DEPTH_VISUALIZER_VERTEX_SHADER_NAME = 43 | "shaders/background_show_depth_color_visualization.vert"; 44 | private static final String DEPTH_VISUALIZER_FRAGMENT_SHADER_NAME = 45 | "shaders/background_show_depth_color_visualization.frag"; 46 | 47 | private static final int COORDS_PER_VERTEX = 2; 48 | private static final int TEXCOORDS_PER_VERTEX = 2; 49 | private static final int FLOAT_SIZE = 4; 50 | 51 | private FloatBuffer quadCoords; 52 | private FloatBuffer quadTexCoords; 53 | 54 | private int cameraProgram; 55 | private int depthProgram; 56 | 57 | private int cameraPositionAttrib; 58 | private int cameraTexCoordAttrib; 59 | private int cameraTextureUniform; 60 | private int cameraTextureId = -1; 61 | private boolean suppressTimestampZeroRendering = true; 62 | 63 | private int depthPositionAttrib; 64 | private int depthTexCoordAttrib; 65 | private int depthTextureUniform; 66 | private int depthTextureId = -1; 67 | 68 | public int getTextureId() { 69 | return cameraTextureId; 70 | } 71 | 72 | /** 73 | * Allocates and initializes OpenGL resources needed by the background renderer. Must be called on 74 | * the OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, 75 | * EGLConfig)}. 76 | * 77 | * @param context Needed to access shader source. 78 | */ 79 | public void createOnGlThread(Context context, int depthTextureId) throws IOException { 80 | // Generate the background texture. 
81 | int[] textures = new int[1]; 82 | GLES20.glGenTextures(1, textures, 0); 83 | cameraTextureId = textures[0]; 84 | int textureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES; 85 | GLES20.glBindTexture(textureTarget, cameraTextureId); 86 | GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); 87 | GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); 88 | GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); 89 | GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); 90 | 91 | int numVertices = 4; 92 | if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) { 93 | throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer."); 94 | } 95 | 96 | ByteBuffer bbCoords = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE); 97 | bbCoords.order(ByteOrder.nativeOrder()); 98 | quadCoords = bbCoords.asFloatBuffer(); 99 | quadCoords.put(QUAD_COORDS); 100 | quadCoords.position(0); 101 | 102 | ByteBuffer bbTexCoordsTransformed = 103 | ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE); 104 | bbTexCoordsTransformed.order(ByteOrder.nativeOrder()); 105 | quadTexCoords = bbTexCoordsTransformed.asFloatBuffer(); 106 | 107 | // Load render camera feed shader. 
108 | { 109 | int vertexShader = 110 | ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, CAMERA_VERTEX_SHADER_NAME); 111 | int fragmentShader = 112 | ShaderUtil.loadGLShader( 113 | TAG, context, GLES20.GL_FRAGMENT_SHADER, CAMERA_FRAGMENT_SHADER_NAME); 114 | 115 | cameraProgram = GLES20.glCreateProgram(); 116 | GLES20.glAttachShader(cameraProgram, vertexShader); 117 | GLES20.glAttachShader(cameraProgram, fragmentShader); 118 | GLES20.glLinkProgram(cameraProgram); 119 | GLES20.glUseProgram(cameraProgram); 120 | cameraPositionAttrib = GLES20.glGetAttribLocation(cameraProgram, "a_Position"); 121 | cameraTexCoordAttrib = GLES20.glGetAttribLocation(cameraProgram, "a_TexCoord"); 122 | ShaderUtil.checkGLError(TAG, "Program creation"); 123 | 124 | cameraTextureUniform = GLES20.glGetUniformLocation(cameraProgram, "sTexture"); 125 | ShaderUtil.checkGLError(TAG, "Program parameters"); 126 | } 127 | 128 | // Load render depth map shader. 129 | { 130 | int vertexShader = 131 | ShaderUtil.loadGLShader( 132 | TAG, context, GLES20.GL_VERTEX_SHADER, DEPTH_VISUALIZER_VERTEX_SHADER_NAME); 133 | int fragmentShader = 134 | ShaderUtil.loadGLShader( 135 | TAG, context, GLES20.GL_FRAGMENT_SHADER, DEPTH_VISUALIZER_FRAGMENT_SHADER_NAME); 136 | 137 | depthProgram = GLES20.glCreateProgram(); 138 | GLES20.glAttachShader(depthProgram, vertexShader); 139 | GLES20.glAttachShader(depthProgram, fragmentShader); 140 | GLES20.glLinkProgram(depthProgram); 141 | GLES20.glUseProgram(depthProgram); 142 | depthPositionAttrib = GLES20.glGetAttribLocation(depthProgram, "a_Position"); 143 | depthTexCoordAttrib = GLES20.glGetAttribLocation(depthProgram, "a_TexCoord"); 144 | ShaderUtil.checkGLError(TAG, "Program creation"); 145 | 146 | depthTextureUniform = GLES20.glGetUniformLocation(depthProgram, "u_DepthTexture"); 147 | ShaderUtil.checkGLError(TAG, "Program parameters"); 148 | } 149 | 150 | this.depthTextureId = depthTextureId; 151 | } 152 | 153 | public void createOnGlThread(Context context) 
throws IOException { 154 | createOnGlThread(context, /*depthTextureId=*/ -1); 155 | } 156 | 157 | public void suppressTimestampZeroRendering(boolean suppressTimestampZeroRendering) { 158 | this.suppressTimestampZeroRendering = suppressTimestampZeroRendering; 159 | } 160 | 161 | /** 162 | * Draws the AR background image. The image will be drawn such that virtual content rendered with 163 | * the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)} and 164 | * {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will 165 | * accurately follow static physical objects. This must be called before drawing virtual 166 | * content. 167 | * 168 | * @param frame The current {@code Frame} as returned by {@link Session#update()}. 169 | * @param debugShowDepthMap Toggles whether to show the live camera feed or latest depth image. 170 | */ 171 | public void draw(@NonNull Frame frame, boolean debugShowDepthMap) { 172 | // If display rotation changed (also includes view size change), we need to re-query the uv 173 | // coordinates for the screen rect, as they may have changed as well. 174 | if (frame.hasDisplayGeometryChanged()) { 175 | frame.transformCoordinates2d( 176 | Coordinates2d.OPENGL_NORMALIZED_DEVICE_COORDINATES, 177 | quadCoords, 178 | Coordinates2d.TEXTURE_NORMALIZED, 179 | quadTexCoords); 180 | } 181 | 182 | if (frame.getTimestamp() == 0 && suppressTimestampZeroRendering) { 183 | // Suppress rendering if the camera did not produce the first frame yet. This is to avoid 184 | // drawing possible leftover data from previous sessions if the texture is reused. 185 | return; 186 | } 187 | 188 | draw(debugShowDepthMap); 189 | } 190 | 191 | public void draw(@NonNull Frame frame) { 192 | draw(frame, /*debugShowDepthMap=*/ false); 193 | } 194 | 195 | /** 196 | * Draws the camera image using the currently configured {@link BackgroundRenderer#quadTexCoords} 197 | * image texture coordinates. 198 | * 199 | *

The image will be center cropped if the camera sensor aspect ratio does not match the screen 200 | * aspect ratio, which matches the cropping behavior of {@link 201 | * Frame#transformCoordinates2d(Coordinates2d, float[], Coordinates2d, float[])}. 202 | */ 203 | public void draw( 204 | int imageWidth, int imageHeight, float screenAspectRatio, int cameraToDisplayRotation) { 205 | // Crop the camera image to fit the screen aspect ratio. 206 | float imageAspectRatio = (float) imageWidth / imageHeight; 207 | float croppedWidth; 208 | float croppedHeight; 209 | if (screenAspectRatio < imageAspectRatio) { 210 | croppedWidth = imageHeight * screenAspectRatio; 211 | croppedHeight = imageHeight; 212 | } else { 213 | croppedWidth = imageWidth; 214 | croppedHeight = imageWidth / screenAspectRatio; 215 | } 216 | 217 | float u = (imageWidth - croppedWidth) / imageWidth * 0.5f; 218 | float v = (imageHeight - croppedHeight) / imageHeight * 0.5f; 219 | 220 | float[] texCoordTransformed; 221 | switch (cameraToDisplayRotation) { 222 | case 90: 223 | texCoordTransformed = new float[] {1 - u, 1 - v, 1 - u, v, u, 1 - v, u, v}; 224 | break; 225 | case 180: 226 | texCoordTransformed = new float[] {1 - u, v, u, v, 1 - u, 1 - v, u, 1 - v}; 227 | break; 228 | case 270: 229 | texCoordTransformed = new float[] {u, v, u, 1 - v, 1 - u, v, 1 - u, 1 - v}; 230 | break; 231 | case 0: 232 | texCoordTransformed = new float[] {u, 1 - v, 1 - u, 1 - v, u, v, 1 - u, v}; 233 | break; 234 | default: 235 | throw new IllegalArgumentException("Unhandled rotation: " + cameraToDisplayRotation); 236 | } 237 | 238 | // Write image texture coordinates. 239 | quadTexCoords.position(0); 240 | quadTexCoords.put(texCoordTransformed); 241 | 242 | draw(/*debugShowDepthMap=*/ false); 243 | } 244 | 245 | /** 246 | * Draws the camera background image using the currently configured {@link 247 | * BackgroundRenderer#quadTexCoords} image texture coordinates. 
248 | */ 249 | private void draw(boolean debugShowDepthMap) { 250 | // Ensure position is rewound before use. 251 | quadTexCoords.position(0); 252 | 253 | // No need to test or write depth, the screen quad has arbitrary depth, and is expected 254 | // to be drawn first. 255 | GLES20.glDisable(GLES20.GL_DEPTH_TEST); 256 | GLES20.glDepthMask(false); 257 | 258 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 259 | 260 | if (debugShowDepthMap) { 261 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, depthTextureId); 262 | GLES20.glUseProgram(depthProgram); 263 | GLES20.glUniform1i(depthTextureUniform, 0); 264 | 265 | // Set the vertex positions and texture coordinates. 266 | GLES20.glVertexAttribPointer( 267 | depthPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadCoords); 268 | GLES20.glVertexAttribPointer( 269 | depthTexCoordAttrib, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadTexCoords); 270 | GLES20.glEnableVertexAttribArray(depthPositionAttrib); 271 | GLES20.glEnableVertexAttribArray(depthTexCoordAttrib); 272 | } else { 273 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId); 274 | GLES20.glUseProgram(cameraProgram); 275 | GLES20.glUniform1i(cameraTextureUniform, 0); 276 | 277 | // Set the vertex positions and texture coordinates. 
278 | GLES20.glVertexAttribPointer( 279 | cameraPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadCoords); 280 | GLES20.glVertexAttribPointer( 281 | cameraTexCoordAttrib, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadTexCoords); 282 | GLES20.glEnableVertexAttribArray(cameraPositionAttrib); 283 | GLES20.glEnableVertexAttribArray(cameraTexCoordAttrib); 284 | } 285 | 286 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); 287 | 288 | // Disable vertex arrays 289 | if (debugShowDepthMap) { 290 | GLES20.glDisableVertexAttribArray(depthPositionAttrib); 291 | GLES20.glDisableVertexAttribArray(depthTexCoordAttrib); 292 | } else { 293 | GLES20.glDisableVertexAttribArray(cameraPositionAttrib); 294 | GLES20.glDisableVertexAttribArray(cameraTexCoordAttrib); 295 | } 296 | 297 | // Restore the depth state for further drawing. 298 | GLES20.glDepthMask(true); 299 | GLES20.glEnable(GLES20.GL_DEPTH_TEST); 300 | 301 | ShaderUtil.checkGLError(TAG, "BackgroundRendererDraw"); 302 | } 303 | 304 | /** 305 | * (-1, 1) ------- (1, 1) 306 | * | \ | 307 | * | \ | 308 | * | \ | 309 | * | \ | 310 | * (-1, -1) ------ (1, -1) 311 | * Ensure triangles are front-facing, to support glCullFace(). 312 | * This quad will be drawn using GL_TRIANGLE_STRIP which draws two 313 | * triangles: v0->v1->v2, then v2->v1->v3. 314 | */ 315 | private static final float[] QUAD_COORDS = 316 | new float[] { 317 | -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, 318 | }; 319 | } 320 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/rendering/ObjectRenderer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.rendering; 17 | 18 | import android.content.Context; 19 | import android.graphics.Bitmap; 20 | import android.graphics.BitmapFactory; 21 | import android.opengl.GLES20; 22 | import android.opengl.GLUtils; 23 | import android.opengl.Matrix; 24 | import de.javagl.obj.Obj; 25 | import de.javagl.obj.ObjData; 26 | import de.javagl.obj.ObjReader; 27 | import de.javagl.obj.ObjUtils; 28 | import java.io.IOException; 29 | import java.io.InputStream; 30 | import java.nio.ByteBuffer; 31 | import java.nio.ByteOrder; 32 | import java.nio.FloatBuffer; 33 | import java.nio.IntBuffer; 34 | import java.nio.ShortBuffer; 35 | import java.util.Map; 36 | import java.util.TreeMap; 37 | 38 | /** Renders an object loaded from an OBJ file in OpenGL. */ 39 | public class ObjectRenderer { 40 | private static final String TAG = ObjectRenderer.class.getSimpleName(); 41 | 42 | /** 43 | * Blend mode. 44 | * 45 | * @see #setBlendMode(BlendMode) 46 | */ 47 | public enum BlendMode { 48 | /** Multiplies the destination color by the source alpha, without z-buffer writing. */ 49 | Shadow, 50 | /** Normal alpha blending with z-buffer writing. */ 51 | AlphaBlending 52 | } 53 | 54 | // Shader names. 
  // Shader asset paths, relative to the APK's assets/ directory.
  private static final String VERTEX_SHADER_NAME = "shaders/ar_object.vert";
  private static final String FRAGMENT_SHADER_NAME = "shaders/ar_object.frag";

  private static final int COORDS_PER_VERTEX = 3;
  private static final float[] DEFAULT_COLOR = new float[] {0f, 0f, 0f, 0f};

  // Note: the last component must be zero to avoid applying the translational part of the matrix.
  private static final float[] LIGHT_DIRECTION = new float[] {0.250f, 0.866f, 0.433f, 0.0f};
  private final float[] viewLightDirection = new float[4];

  // Object vertex buffer variables.
  private int vertexBufferId;
  private int verticesBaseAddress; // byte offset of positions inside the packed VBO
  private int texCoordsBaseAddress; // byte offset of texture coordinates inside the packed VBO
  private int normalsBaseAddress; // byte offset of normals inside the packed VBO
  private int indexBufferId;
  private int indexCount;

  private int program;
  private final int[] textures = new int[1];

  // Shader location: model view projection matrix.
  private int modelViewUniform;
  private int modelViewProjectionUniform;

  // Shader location: object attributes.
  private int positionAttribute;
  private int normalAttribute;
  private int texCoordAttribute;

  // Shader location: texture sampler.
  private int textureUniform;

  // Shader location: environment properties.
  private int lightingParametersUniform;

  // Shader location: material properties.
  private int materialParametersUniform;

  // Shader location: color correction property.
  private int colorCorrectionParameterUniform;

  // Shader location: object color property (to change the primary color of the object).
  private int colorUniform;

  // Shader location: depth texture.
  private int depthTextureUniform;

  // Shader location: transform to depth uvs.
  private int depthUvTransformUniform;

  // Shader location: the aspect ratio of the depth texture.
  private int depthAspectRatioUniform;

  private BlendMode blendMode = null;

  // Temporary matrices allocated here to reduce number of allocations for each frame.
  private final float[] modelMatrix = new float[16];
  private final float[] modelViewMatrix = new float[16];
  private final float[] modelViewProjectionMatrix = new float[16];

  // Set some default material properties to use for lighting.
  private float ambient = 0.3f;
  private float diffuse = 1.0f;
  private float specular = 1.0f;
  private float specularPower = 6.0f;

  // Depth-for-Occlusion parameters.
  private static final String USE_DEPTH_FOR_OCCLUSION_SHADER_FLAG = "USE_DEPTH_FOR_OCCLUSION";
  private boolean useDepthForOcclusion = false;
  private float depthAspectRatio = 0.0f;
  private float[] uvTransform = null;
  private int depthTextureId;

  /**
   * Creates and initializes OpenGL resources needed for rendering the model. Must be called on the
   * GL thread (it issues GL calls directly).
   *
   * @param context Context for loading the shader and below-named model and texture assets.
   * @param objAssetName Name of the OBJ file containing the model geometry.
   * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
   * @throws IOException if the shader, texture, or OBJ asset cannot be read.
   */
  public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName)
      throws IOException {
    // Compiles and loads the shader based on the current configuration.
    compileAndLoadShaderProgram(context);

    // Read the texture.
    Bitmap textureBitmap =
        BitmapFactory.decodeStream(context.getAssets().open(diffuseTextureAssetName));

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glGenTextures(textures.length, textures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

    // Trilinear filtering; mipmaps are generated just below.
    GLES20.glTexParameteri(
        GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

    // Pixel data now lives in GL texture memory; the CPU-side bitmap is no longer needed.
    textureBitmap.recycle();

    ShaderUtil.checkGLError(TAG, "Texture loading");

    // Read the obj file.
    // NOTE(review): objInputStream is never closed; consider try-with-resources — confirm
    // ObjReader.read does not close it internally.
    InputStream objInputStream = context.getAssets().open(objAssetName);
    Obj obj = ObjReader.read(objInputStream);

    // Prepare the Obj so that its structure is suitable for
    // rendering with OpenGL:
    // 1. Triangulate it
    // 2. Make sure that texture coordinates are not ambiguous
    // 3. Make sure that normals are not ambiguous
    // 4. Convert it to single-indexed data
    obj = ObjUtils.convertToRenderable(obj);

    // OpenGL does not use Java arrays. ByteBuffers are used instead to provide data in a format
    // that OpenGL understands.

    // Obtain the data from the OBJ, as direct buffers:
    IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
    FloatBuffer vertices = ObjData.getVertices(obj);
    FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
    FloatBuffer normals = ObjData.getNormals(obj);

    // Convert int indices to shorts for GL ES 2.0 compatibility
    ShortBuffer indices =
        ByteBuffer.allocateDirect(2 * wideIndices.limit())
            .order(ByteOrder.nativeOrder())
            .asShortBuffer();
    while (wideIndices.hasRemaining()) {
      indices.put((short) wideIndices.get());
    }
    indices.rewind();

    int[] buffers = new int[2];
    GLES20.glGenBuffers(2, buffers, 0);
    vertexBufferId = buffers[0];
    indexBufferId = buffers[1];

    // Load vertex buffer: positions, then tex coords, then normals, packed back-to-back into a
    // single VBO. Offsets are in bytes (4 bytes per float).
    verticesBaseAddress = 0;
    texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit();
    normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit();
    final int totalBytes = normalsBaseAddress + 4 * normals.limit();

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW);
    GLES20.glBufferSubData(
        GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices);
    GLES20.glBufferSubData(
        GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords);
    GLES20.glBufferSubData(
        GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    // Load index buffer (2 bytes per short index).
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
    indexCount = indices.limit();
    GLES20.glBufferData(
        GLES20.GL_ELEMENT_ARRAY_BUFFER, 2 * indexCount, indices, GLES20.GL_STATIC_DRAW);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "OBJ buffer load");

    Matrix.setIdentityM(modelMatrix, 0);
  }

  /**
   * Selects the blending mode for rendering.
   *
   * @param blendMode The blending mode. Null indicates no blending (opaque rendering).
   */
  public void setBlendMode(BlendMode blendMode) {
    this.blendMode = blendMode;
  }

  /**
   * Specifies whether to use the depth texture to perform depth-based occlusion of virtual objects
   * from real-world geometry.
   *

This function is a no-op if the value provided is the same as what is already set. If the 238 | * value changes, this function will recompile and reload the shader program to either 239 | * enable/disable depth-based occlusion. NOTE: recompilation of the shader is inefficient. This 240 | * code could be optimized to precompile both versions of the shader. 241 | * 242 | * @param context Context for loading the shader. 243 | * @param useDepthForOcclusion Specifies whether to use the depth texture to perform occlusion 244 | * during rendering of virtual objects. 245 | */ 246 | public void setUseDepthForOcclusion(Context context, boolean useDepthForOcclusion) 247 | throws IOException { 248 | if (this.useDepthForOcclusion == useDepthForOcclusion) { 249 | return; // No change, does nothing. 250 | } 251 | 252 | // Toggles the occlusion rendering mode and recompiles the shader. 253 | this.useDepthForOcclusion = useDepthForOcclusion; 254 | compileAndLoadShaderProgram(context); 255 | } 256 | 257 | private void compileAndLoadShaderProgram(Context context) throws IOException { 258 | // Compiles and loads the shader program based on the selected mode. 259 | Map defineValuesMap = new TreeMap<>(); 260 | defineValuesMap.put(USE_DEPTH_FOR_OCCLUSION_SHADER_FLAG, useDepthForOcclusion ? 
1 : 0); 261 | 262 | final int vertexShader = 263 | ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME); 264 | final int fragmentShader = 265 | ShaderUtil.loadGLShader( 266 | TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME, defineValuesMap); 267 | 268 | program = GLES20.glCreateProgram(); 269 | GLES20.glAttachShader(program, vertexShader); 270 | GLES20.glAttachShader(program, fragmentShader); 271 | GLES20.glLinkProgram(program); 272 | GLES20.glUseProgram(program); 273 | 274 | ShaderUtil.checkGLError(TAG, "Program creation"); 275 | 276 | modelViewUniform = GLES20.glGetUniformLocation(program, "u_ModelView"); 277 | modelViewProjectionUniform = GLES20.glGetUniformLocation(program, "u_ModelViewProjection"); 278 | 279 | positionAttribute = GLES20.glGetAttribLocation(program, "a_Position"); 280 | normalAttribute = GLES20.glGetAttribLocation(program, "a_Normal"); 281 | texCoordAttribute = GLES20.glGetAttribLocation(program, "a_TexCoord"); 282 | 283 | textureUniform = GLES20.glGetUniformLocation(program, "u_Texture"); 284 | 285 | lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters"); 286 | materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters"); 287 | colorCorrectionParameterUniform = 288 | GLES20.glGetUniformLocation(program, "u_ColorCorrectionParameters"); 289 | colorUniform = GLES20.glGetUniformLocation(program, "u_ObjColor"); 290 | 291 | // Occlusion Uniforms. 292 | if (useDepthForOcclusion) { 293 | depthTextureUniform = GLES20.glGetUniformLocation(program, "u_DepthTexture"); 294 | depthUvTransformUniform = GLES20.glGetUniformLocation(program, "u_DepthUvTransform"); 295 | depthAspectRatioUniform = GLES20.glGetUniformLocation(program, "u_DepthAspectRatio"); 296 | } 297 | 298 | ShaderUtil.checkGLError(TAG, "Program parameters"); 299 | } 300 | 301 | /** 302 | * Updates the object model matrix and applies scaling. 
   *
   * @param modelMatrix A 4x4 model-to-world transformation matrix, stored in column-major order.
   * @param scaleFactor A separate scaling factor to apply before the {@code modelMatrix}.
   * @see android.opengl.Matrix
   */
  public void updateModelMatrix(float[] modelMatrix, float scaleFactor) {
    // Build a uniform scale matrix (diagonal entries 0, 5, 10 of a column-major 4x4) and compose:
    // this.modelMatrix = modelMatrix * scaleMatrix.
    float[] scaleMatrix = new float[16];
    Matrix.setIdentityM(scaleMatrix, 0);
    scaleMatrix[0] = scaleFactor;
    scaleMatrix[5] = scaleFactor;
    scaleMatrix[10] = scaleFactor;
    Matrix.multiplyMM(this.modelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
  }

  /**
   * Sets the surface characteristics of the rendered model.
   *
   * @param ambient Intensity of non-directional surface illumination.
   * @param diffuse Diffuse (matte) surface reflectivity.
   * @param specular Specular (shiny) surface reflectivity.
   * @param specularPower Surface shininess. Larger values result in a smaller, sharper specular
   *     highlight.
   */
  public void setMaterialProperties(
      float ambient, float diffuse, float specular, float specularPower) {
    this.ambient = ambient;
    this.diffuse = diffuse;
    this.specular = specular;
    this.specularPower = specularPower;
  }

  /**
   * Draws the model.
   *
   * @param cameraView A 4x4 view matrix, in column-major order.
   * @param cameraPerspective A 4x4 projection matrix, in column-major order.
   * @param colorCorrectionRgba Illumination intensity. Combined with diffuse and specular material
   *     properties.
   * @see #setBlendMode(BlendMode)
   * @see #updateModelMatrix(float[], float)
   * @see #setMaterialProperties(float, float, float, float)
   * @see android.opengl.Matrix
   */
  public void draw(float[] cameraView, float[] cameraPerspective, float[] colorCorrectionRgba) {
    // Convenience overload: draw using the default (all-zero) object color.
    draw(cameraView, cameraPerspective, colorCorrectionRgba, DEFAULT_COLOR);
  }

  /** Draws the model with an explicit object color. Must be called on the GL thread. */
  public void draw(
      float[] cameraView,
      float[] cameraPerspective,
      float[] colorCorrectionRgba,
      float[] objColor) {

    ShaderUtil.checkGLError(TAG, "Before draw");

    // Build the ModelView and ModelViewProjection matrices
    // for calculating object position and light.
    Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0);
    Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0);

    GLES20.glUseProgram(program);

    // Set the lighting environment properties: transform the light direction into view space and
    // renormalize (the model-view matrix may carry scale).
    Matrix.multiplyMV(viewLightDirection, 0, modelViewMatrix, 0, LIGHT_DIRECTION, 0);
    normalizeVec3(viewLightDirection);
    GLES20.glUniform4f(
        lightingParametersUniform,
        viewLightDirection[0],
        viewLightDirection[1],
        viewLightDirection[2],
        1.f);
    GLES20.glUniform4fv(colorCorrectionParameterUniform, 1, colorCorrectionRgba, 0);

    // Set the object color property.
    GLES20.glUniform4fv(colorUniform, 1, objColor, 0);

    // Set the object material properties.
    GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower);

    // Attach the object texture to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
    GLES20.glUniform1i(textureUniform, 0);

    // Occlusion parameters.
    if (useDepthForOcclusion) {
      // Attach the depth texture to texture unit 1.
      GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, depthTextureId);
      GLES20.glUniform1i(depthTextureUniform, 1);

      // Set the depth texture uv transform.
      GLES20.glUniformMatrix3fv(depthUvTransformUniform, 1, false, uvTransform, 0);
      GLES20.glUniform1f(depthAspectRatioUniform, depthAspectRatio);
    }

    // Set the vertex attributes; offsets are the byte base addresses computed at load time.
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);

    GLES20.glVertexAttribPointer(
        positionAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, verticesBaseAddress);
    GLES20.glVertexAttribPointer(normalAttribute, 3, GLES20.GL_FLOAT, false, 0, normalsBaseAddress);
    GLES20.glVertexAttribPointer(
        texCoordAttribute, 2, GLES20.GL_FLOAT, false, 0, texCoordsBaseAddress);

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    // Set the ModelViewProjection matrix in the shader.
    GLES20.glUniformMatrix4fv(modelViewUniform, 1, false, modelViewMatrix, 0);
    GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(positionAttribute);
    GLES20.glEnableVertexAttribArray(normalAttribute);
    GLES20.glEnableVertexAttribArray(texCoordAttribute);

    if (blendMode != null) {
      GLES20.glEnable(GLES20.GL_BLEND);
      switch (blendMode) {
        case Shadow:
          // Multiplicative blending function for Shadow.
          GLES20.glDepthMask(false);
          GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA);
          break;
        case AlphaBlending:
          // Alpha blending function, with the depth mask enabled.
          GLES20.glDepthMask(true);

          // Textures are loaded with premultiplied alpha
          // (https://developer.android.com/reference/android/graphics/BitmapFactory.Options#inPremultiplied),
          // so we use the premultiplied alpha blend factors.
          GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
          break;
      }
    }

    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, indexCount, GLES20.GL_UNSIGNED_SHORT, 0);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);

    if (blendMode != null) {
      // Restore default blend/depth state so later draw calls are unaffected.
      GLES20.glDisable(GLES20.GL_BLEND);
      GLES20.glDepthMask(true);
    }

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(positionAttribute);
    GLES20.glDisableVertexAttribArray(normalAttribute);
    GLES20.glDisableVertexAttribArray(texCoordAttribute);

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

    ShaderUtil.checkGLError(TAG, "After draw");
  }

  // Normalizes the x/y/z components of v in place; the fourth component (if any) is untouched.
  private static void normalizeVec3(float[] v) {
    float reciprocalLength = 1.0f / (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
    v[0] *= reciprocalLength;
    v[1] *= reciprocalLength;
    v[2] *= reciprocalLength;
  }

  // Stores the uv transform applied to depth-texture coordinates during occlusion rendering.
  // The array is kept by reference, not copied.
  public void setUvTransformMatrix(float[] transform) {
    uvTransform = transform;
  }

  // Registers the depth texture used for occlusion and caches its aspect ratio (width / height).
  public void setDepthTexture(int textureId, int width, int height) {
    depthTextureId = textureId;
    depthAspectRatio = (float) width / (float) height;
  }
}
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/java/common/rendering/PlaneRenderer.java:
--------------------------------------------------------------------------------
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ar.core.examples.java.common.rendering;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import com.google.ar.core.Camera;
import com.google.ar.core.Plane;
import com.google.ar.core.Pose;
import com.google.ar.core.TrackingState;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Renders the detected AR planes. */
public class PlaneRenderer {
  private static final String TAG = PlaneRenderer.class.getSimpleName();

  // Shader names (relative to the APK's assets/ directory).
  private static final String VERTEX_SHADER_NAME = "shaders/plane.vert";
  private static final String FRAGMENT_SHADER_NAME = "shaders/plane.frag";

  private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
  private static final int BYTES_PER_SHORT = Short.SIZE / 8;
  private static final int COORDS_PER_VERTEX = 3; // x, z, alpha

  // Each boundary vertex produces two rendered vertices: an outer one on the polygon itself and
  // an inner, inset copy used for the fading edge.
  private static final int VERTS_PER_BOUNDARY_VERT = 2;
  private static final int INDICES_PER_BOUNDARY_VERT = 3;
  private static final int INITIAL_BUFFER_BOUNDARY_VERTS = 64;

  private static final int INITIAL_VERTEX_BUFFER_SIZE_BYTES =
      BYTES_PER_FLOAT * COORDS_PER_VERTEX * VERTS_PER_BOUNDARY_VERT * INITIAL_BUFFER_BOUNDARY_VERTS;

  // NOTE(review): INDICES_PER_BOUNDARY_VERT appears twice here; VERTS_PER_BOUNDARY_VERT was
  // likely intended for one factor. Harmless as written, since 3*3 >= 3*2 only over-allocates.
  private static final int INITIAL_INDEX_BUFFER_SIZE_BYTES =
      BYTES_PER_SHORT
          * INDICES_PER_BOUNDARY_VERT
          * INDICES_PER_BOUNDARY_VERT
          * INITIAL_BUFFER_BOUNDARY_VERTS;

  private static final float FADE_RADIUS_M = 0.25f;
  private static final float DOTS_PER_METER = 10.0f;
  private static final float EQUILATERAL_TRIANGLE_SCALE = (float) (1 / Math.sqrt(3));

  // Using the "signed distance field" approach to render sharp lines and circles.
72 | // {dotThreshold, lineThreshold, lineFadeSpeed, occlusionScale} 73 | // dotThreshold/lineThreshold: red/green intensity above which dots/lines are present 74 | // lineFadeShrink: lines will fade in between alpha = 1-(1/lineFadeShrink) and 1.0 75 | // occlusionShrink: occluded planes will fade out between alpha = 0 and 1/occlusionShrink 76 | private static final float[] GRID_CONTROL = {0.2f, 0.4f, 2.0f, 1.5f}; 77 | 78 | private int planeProgram; 79 | private final int[] textures = new int[1]; 80 | 81 | private int planeXZPositionAlphaAttribute; 82 | 83 | private int planeModelUniform; 84 | private int planeNormalUniform; 85 | private int planeModelViewProjectionUniform; 86 | private int textureUniform; 87 | private int gridControlUniform; 88 | private int planeUvMatrixUniform; 89 | 90 | private FloatBuffer vertexBuffer = 91 | ByteBuffer.allocateDirect(INITIAL_VERTEX_BUFFER_SIZE_BYTES) 92 | .order(ByteOrder.nativeOrder()) 93 | .asFloatBuffer(); 94 | private ShortBuffer indexBuffer = 95 | ByteBuffer.allocateDirect(INITIAL_INDEX_BUFFER_SIZE_BYTES) 96 | .order(ByteOrder.nativeOrder()) 97 | .asShortBuffer(); 98 | 99 | // Temporary lists/matrices allocated here to reduce number of allocations for each frame. 100 | private final float[] modelMatrix = new float[16]; 101 | private final float[] modelViewMatrix = new float[16]; 102 | private final float[] modelViewProjectionMatrix = new float[16]; 103 | private final float[] planeAngleUvMatrix = 104 | new float[4]; // 2x2 rotation matrix applied to uv coords. 105 | 106 | private final Map planeIndexMap = new HashMap<>(); 107 | 108 | public PlaneRenderer() {} 109 | 110 | /** 111 | * Allocates and initializes OpenGL resources needed by the plane renderer. Must be called on the 112 | * OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}. 113 | * 114 | * @param context Needed to access shader source and texture PNG. 
115 | * @param gridDistanceTextureName Name of the PNG file containing the grid texture. 116 | */ 117 | public void createOnGlThread(Context context, String gridDistanceTextureName) throws IOException { 118 | int vertexShader = 119 | ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME); 120 | int passthroughShader = 121 | ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME); 122 | 123 | planeProgram = GLES20.glCreateProgram(); 124 | GLES20.glAttachShader(planeProgram, vertexShader); 125 | GLES20.glAttachShader(planeProgram, passthroughShader); 126 | GLES20.glLinkProgram(planeProgram); 127 | GLES20.glUseProgram(planeProgram); 128 | 129 | ShaderUtil.checkGLError(TAG, "Program creation"); 130 | 131 | // Read the texture. 132 | Bitmap textureBitmap = 133 | BitmapFactory.decodeStream(context.getAssets().open(gridDistanceTextureName)); 134 | 135 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 136 | GLES20.glGenTextures(textures.length, textures, 0); 137 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); 138 | 139 | GLES20.glTexParameteri( 140 | GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR); 141 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); 142 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0); 143 | GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D); 144 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 145 | 146 | ShaderUtil.checkGLError(TAG, "Texture loading"); 147 | 148 | planeXZPositionAlphaAttribute = GLES20.glGetAttribLocation(planeProgram, "a_XZPositionAlpha"); 149 | 150 | planeModelUniform = GLES20.glGetUniformLocation(planeProgram, "u_Model"); 151 | planeNormalUniform = GLES20.glGetUniformLocation(planeProgram, "u_Normal"); 152 | planeModelViewProjectionUniform = 153 | GLES20.glGetUniformLocation(planeProgram, "u_ModelViewProjection"); 154 | textureUniform = GLES20.glGetUniformLocation(planeProgram, 
"u_Texture"); 155 | gridControlUniform = GLES20.glGetUniformLocation(planeProgram, "u_gridControl"); 156 | planeUvMatrixUniform = GLES20.glGetUniformLocation(planeProgram, "u_PlaneUvMatrix"); 157 | 158 | ShaderUtil.checkGLError(TAG, "Program parameters"); 159 | } 160 | 161 | /** Updates the plane model transform matrix and extents. */ 162 | private void updatePlaneParameters( 163 | float[] planeMatrix, float extentX, float extentZ, FloatBuffer boundary) { 164 | System.arraycopy(planeMatrix, 0, modelMatrix, 0, 16); 165 | if (boundary == null) { 166 | vertexBuffer.limit(0); 167 | indexBuffer.limit(0); 168 | return; 169 | } 170 | 171 | // Generate a new set of vertices and a corresponding triangle strip index set so that 172 | // the plane boundary polygon has a fading edge. This is done by making a copy of the 173 | // boundary polygon vertices and scaling it down around center to push it inwards. Then 174 | // the index buffer is setup accordingly. 175 | boundary.rewind(); 176 | int boundaryVertices = boundary.limit() / 2; 177 | int numVertices; 178 | int numIndices; 179 | 180 | numVertices = boundaryVertices * VERTS_PER_BOUNDARY_VERT; 181 | // drawn as GL_TRIANGLE_STRIP with 3n-2 triangles (n-2 for fill, 2n for perimeter). 
182 | numIndices = boundaryVertices * INDICES_PER_BOUNDARY_VERT; 183 | 184 | if (vertexBuffer.capacity() < numVertices * COORDS_PER_VERTEX) { 185 | int size = vertexBuffer.capacity(); 186 | while (size < numVertices * COORDS_PER_VERTEX) { 187 | size *= 2; 188 | } 189 | vertexBuffer = 190 | ByteBuffer.allocateDirect(BYTES_PER_FLOAT * size) 191 | .order(ByteOrder.nativeOrder()) 192 | .asFloatBuffer(); 193 | } 194 | vertexBuffer.rewind(); 195 | vertexBuffer.limit(numVertices * COORDS_PER_VERTEX); 196 | 197 | if (indexBuffer.capacity() < numIndices) { 198 | int size = indexBuffer.capacity(); 199 | while (size < numIndices) { 200 | size *= 2; 201 | } 202 | indexBuffer = 203 | ByteBuffer.allocateDirect(BYTES_PER_SHORT * size) 204 | .order(ByteOrder.nativeOrder()) 205 | .asShortBuffer(); 206 | } 207 | indexBuffer.rewind(); 208 | indexBuffer.limit(numIndices); 209 | 210 | // Note: when either dimension of the bounding box is smaller than 2*FADE_RADIUS_M we 211 | // generate a bunch of 0-area triangles. These don't get rendered though so it works 212 | // out ok. 213 | float xScale = Math.max((extentX - 2 * FADE_RADIUS_M) / extentX, 0.0f); 214 | float zScale = Math.max((extentZ - 2 * FADE_RADIUS_M) / extentZ, 0.0f); 215 | 216 | while (boundary.hasRemaining()) { 217 | float x = boundary.get(); 218 | float z = boundary.get(); 219 | vertexBuffer.put(x); 220 | vertexBuffer.put(z); 221 | vertexBuffer.put(0.0f); 222 | vertexBuffer.put(x * xScale); 223 | vertexBuffer.put(z * zScale); 224 | vertexBuffer.put(1.0f); 225 | } 226 | 227 | // step 1, perimeter 228 | indexBuffer.put((short) ((boundaryVertices - 1) * 2)); 229 | for (int i = 0; i < boundaryVertices; ++i) { 230 | indexBuffer.put((short) (i * 2)); 231 | indexBuffer.put((short) (i * 2 + 1)); 232 | } 233 | indexBuffer.put((short) 1); 234 | // This leaves us on the interior edge of the perimeter between the inset vertices 235 | // for boundary verts n-1 and 0. 
236 | 237 | // step 2, interior: 238 | for (int i = 1; i < boundaryVertices / 2; ++i) { 239 | indexBuffer.put((short) ((boundaryVertices - 1 - i) * 2 + 1)); 240 | indexBuffer.put((short) (i * 2 + 1)); 241 | } 242 | if (boundaryVertices % 2 != 0) { 243 | indexBuffer.put((short) ((boundaryVertices / 2) * 2 + 1)); 244 | } 245 | } 246 | 247 | private void draw(float[] cameraView, float[] cameraPerspective, float[] planeNormal) { 248 | // Build the ModelView and ModelViewProjection matrices 249 | // for calculating cube position and light. 250 | Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0); 251 | Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0); 252 | 253 | // Set the position of the plane 254 | vertexBuffer.rewind(); 255 | GLES20.glVertexAttribPointer( 256 | planeXZPositionAlphaAttribute, 257 | COORDS_PER_VERTEX, 258 | GLES20.GL_FLOAT, 259 | false, 260 | BYTES_PER_FLOAT * COORDS_PER_VERTEX, 261 | vertexBuffer); 262 | 263 | // Set the Model and ModelViewProjection matrices in the shader. 264 | GLES20.glUniformMatrix4fv(planeModelUniform, 1, false, modelMatrix, 0); 265 | GLES20.glUniform3f(planeNormalUniform, planeNormal[0], planeNormal[1], planeNormal[2]); 266 | GLES20.glUniformMatrix4fv( 267 | planeModelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0); 268 | 269 | indexBuffer.rewind(); 270 | GLES20.glDrawElements( 271 | GLES20.GL_TRIANGLE_STRIP, indexBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, indexBuffer); 272 | ShaderUtil.checkGLError(TAG, "Drawing plane"); 273 | } 274 | 275 | static class SortablePlane { 276 | final float distance; 277 | final Plane plane; 278 | 279 | SortablePlane(float distance, Plane plane) { 280 | this.distance = distance; 281 | this.plane = plane; 282 | } 283 | } 284 | 285 | /** 286 | * Draws the collection of tracked planes, with closer planes hiding more distant ones. 287 | * 288 | * @param allPlanes The collection of planes to draw. 
289 | * @param cameraPose The pose of the camera, as returned by {@link Camera#getPose()} 290 | * @param cameraPerspective The projection matrix, as returned by {@link 291 | * Camera#getProjectionMatrix(float[], int, float, float)} 292 | */ 293 | public void drawPlanes(Collection allPlanes, Pose cameraPose, float[] cameraPerspective) { 294 | // Planes must be sorted by distance from camera so that we draw closer planes first, and 295 | // they occlude the farther planes. 296 | List sortedPlanes = new ArrayList<>(); 297 | 298 | for (Plane plane : allPlanes) { 299 | if (plane.getTrackingState() != TrackingState.TRACKING || plane.getSubsumedBy() != null) { 300 | continue; 301 | } 302 | 303 | float distance = calculateDistanceToPlane(plane.getCenterPose(), cameraPose); 304 | if (distance < 0) { // Plane is back-facing. 305 | continue; 306 | } 307 | sortedPlanes.add(new SortablePlane(distance, plane)); 308 | } 309 | Collections.sort( 310 | sortedPlanes, 311 | new Comparator() { 312 | @Override 313 | public int compare(SortablePlane a, SortablePlane b) { 314 | return Float.compare(b.distance, a.distance); 315 | } 316 | }); 317 | 318 | float[] cameraView = new float[16]; 319 | cameraPose.inverse().toMatrix(cameraView, 0); 320 | 321 | // Disable depth write. 322 | GLES20.glDepthMask(false); 323 | 324 | // Normal alpha blending with premultiplied alpha. 325 | GLES20.glEnable(GLES20.GL_BLEND); 326 | GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA); 327 | 328 | // Set up the shader. 329 | GLES20.glUseProgram(planeProgram); 330 | 331 | // Attach the texture. 332 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 333 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); 334 | GLES20.glUniform1i(textureUniform, 0); 335 | 336 | // Shared fragment uniforms. 
337 | GLES20.glUniform4fv(gridControlUniform, 1, GRID_CONTROL, 0); 338 | 339 | // Enable vertex arrays 340 | GLES20.glEnableVertexAttribArray(planeXZPositionAlphaAttribute); 341 | 342 | ShaderUtil.checkGLError(TAG, "Setting up to draw planes"); 343 | 344 | for (SortablePlane sortedPlane : sortedPlanes) { 345 | Plane plane = sortedPlane.plane; 346 | float[] planeMatrix = new float[16]; 347 | plane.getCenterPose().toMatrix(planeMatrix, 0); 348 | 349 | float[] normal = new float[3]; 350 | // Get transformed Y axis of plane's coordinate system. 351 | plane.getCenterPose().getTransformedAxis(1, 1.0f, normal, 0); 352 | 353 | updatePlaneParameters( 354 | planeMatrix, plane.getExtentX(), plane.getExtentZ(), plane.getPolygon()); 355 | 356 | // Get plane index. Keep a map to assign same indices to same planes. 357 | Integer planeIndex = planeIndexMap.get(plane); 358 | if (planeIndex == null) { 359 | planeIndex = planeIndexMap.size(); 360 | planeIndexMap.put(plane, planeIndex); 361 | } 362 | 363 | // Each plane will have its own angle offset from others, to make them easier to 364 | // distinguish. Compute a 2x2 rotation matrix from the angle. 
365 | float angleRadians = planeIndex * 0.144f; 366 | float uScale = DOTS_PER_METER; 367 | float vScale = DOTS_PER_METER * EQUILATERAL_TRIANGLE_SCALE; 368 | planeAngleUvMatrix[0] = +(float) Math.cos(angleRadians) * uScale; 369 | planeAngleUvMatrix[1] = -(float) Math.sin(angleRadians) * vScale; 370 | planeAngleUvMatrix[2] = +(float) Math.sin(angleRadians) * uScale; 371 | planeAngleUvMatrix[3] = +(float) Math.cos(angleRadians) * vScale; 372 | GLES20.glUniformMatrix2fv(planeUvMatrixUniform, 1, false, planeAngleUvMatrix, 0); 373 | 374 | draw(cameraView, cameraPerspective, normal); 375 | } 376 | 377 | // Clean up the state we set 378 | GLES20.glDisableVertexAttribArray(planeXZPositionAlphaAttribute); 379 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 380 | GLES20.glDisable(GLES20.GL_BLEND); 381 | GLES20.glDepthMask(true); 382 | 383 | ShaderUtil.checkGLError(TAG, "Cleaning up after drawing planes"); 384 | } 385 | 386 | // Calculate the normal distance to plane from cameraPose, the given planePose should have y axis 387 | // parallel to plane's normal, for example plane's center pose or hit test pose. 388 | public static float calculateDistanceToPlane(Pose planePose, Pose cameraPose) { 389 | float[] normal = new float[3]; 390 | float cameraX = cameraPose.tx(); 391 | float cameraY = cameraPose.ty(); 392 | float cameraZ = cameraPose.tz(); 393 | // Get transformed Y axis of plane's coordinate system. 394 | planePose.getTransformedAxis(1, 1.0f, normal, 0); 395 | // Compute dot product of plane's normal with vector from camera to plane center. 
396 | return (cameraX - planePose.tx()) * normal[0] 397 | + (cameraY - planePose.ty()) * normal[1] 398 | + (cameraZ - planePose.tz()) * normal[2]; 399 | } 400 | } 401 | -------------------------------------------------------------------------------- /app/src/main/java/com/google/ar/core/examples/java/common/rendering/PointCloudRenderer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.ar.core.examples.java.common.rendering; 17 | 18 | import android.content.Context; 19 | import android.opengl.GLES20; 20 | import android.opengl.GLSurfaceView; 21 | import android.opengl.Matrix; 22 | import com.google.ar.core.PointCloud; 23 | import java.io.IOException; 24 | 25 | /** Renders a point cloud. */ 26 | public class PointCloudRenderer { 27 | private static final String TAG = PointCloud.class.getSimpleName(); 28 | 29 | // Shader names. 30 | private static final String VERTEX_SHADER_NAME = "shaders/point_cloud.vert"; 31 | private static final String FRAGMENT_SHADER_NAME = "shaders/point_cloud.frag"; 32 | 33 | private static final int BYTES_PER_FLOAT = Float.SIZE / 8; 34 | private static final int FLOATS_PER_POINT = 4; // X,Y,Z,confidence. 
  private static final int BYTES_PER_POINT = BYTES_PER_FLOAT * FLOATS_PER_POINT;
  // Initial VBO capacity in points; update() doubles the allocation when a frame needs more.
  private static final int INITIAL_BUFFER_POINTS = 1000;

  // GL handle of the vertex buffer object holding the point data.
  private int vbo;
  // Current VBO allocation, in bytes.
  private int vboSize;

  // GL handles for the shader program and its attribute/uniform locations.
  private int programName;
  private int positionAttribute;
  private int modelViewProjectionUniform;
  private int colorUniform;
  private int pointSizeUniform;

  // Number of points currently uploaded to the VBO (drawn by draw()).
  private int numPoints = 0;

  // Keep track of the last point cloud rendered to avoid updating the VBO if point cloud
  // was not changed. Do this using the timestamp since we can't compare PointCloud objects.
  private long lastTimestamp = 0;

  public PointCloudRenderer() {}

  /**
   * Allocates and initializes OpenGL resources needed by the point cloud renderer. Must be called
   * on the OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10,
   * EGLConfig)}.
   *
   * @param context Needed to access shader source.
   * @throws IOException If a shader asset cannot be read.
   */
  public void createOnGlThread(Context context) throws IOException {
    ShaderUtil.checkGLError(TAG, "before create");

    // Allocate the VBO up front with the initial capacity; update() grows it on demand.
    int[] buffers = new int[1];
    GLES20.glGenBuffers(1, buffers, 0);
    vbo = buffers[0];
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);

    vboSize = INITIAL_BUFFER_POINTS * BYTES_PER_POINT;
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "buffer alloc");

    // Compile the two shaders and link them into the point-cloud program.
    int vertexShader =
        ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME);
    int passthroughShader =
        ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME);

    programName = GLES20.glCreateProgram();
    GLES20.glAttachShader(programName, vertexShader);
    GLES20.glAttachShader(programName, passthroughShader);
    GLES20.glLinkProgram(programName);
    GLES20.glUseProgram(programName);

    ShaderUtil.checkGLError(TAG, "program");

    // Cache attribute/uniform locations once at creation so draw() avoids per-frame lookups.
    positionAttribute = GLES20.glGetAttribLocation(programName, "a_Position");
    colorUniform = GLES20.glGetUniformLocation(programName, "u_Color");
    modelViewProjectionUniform = GLES20.glGetUniformLocation(programName, "u_ModelViewProjection");
    pointSizeUniform = GLES20.glGetUniformLocation(programName, "u_PointSize");

    ShaderUtil.checkGLError(TAG, "program params");
  }

  /**
   * Updates the OpenGL buffer contents to the provided point cloud. Repeated calls with the same
   * point cloud will be ignored.
   */
  public void update(PointCloud cloud) {
    if (cloud.getTimestamp() == lastTimestamp) {
      // Redundant call.
      return;
    }
    ShaderUtil.checkGLError(TAG, "before update");

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
    lastTimestamp = cloud.getTimestamp();

    // If the VBO is not large enough to fit the new point cloud, resize it. Doubling until it
    // fits keeps the number of reallocations logarithmic in the peak point count.
    numPoints = cloud.getPoints().remaining() / FLOATS_PER_POINT;
    if (numPoints * BYTES_PER_POINT > vboSize) {
      while (numPoints * BYTES_PER_POINT > vboSize) {
        vboSize *= 2;
      }
      GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW);
    }
    GLES20.glBufferSubData(
        GLES20.GL_ARRAY_BUFFER, 0, numPoints * BYTES_PER_POINT, cloud.getPoints());
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "after update");
  }

  /**
   * Renders the point cloud. ARCore point cloud is given in world space.
   *
   * @param cameraView the camera view matrix for this frame, typically from {@link
   *     com.google.ar.core.Camera#getViewMatrix(float[], int)}.
   * @param cameraPerspective the camera projection matrix for this frame, typically from {@link
   *     com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)}.
   */
  public void draw(float[] cameraView, float[] cameraPerspective) {
    // Points are already in world space, so model-view-projection = projection * view.
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, cameraView, 0);

    ShaderUtil.checkGLError(TAG, "Before draw");

    GLES20.glUseProgram(programName);
    GLES20.glEnableVertexAttribArray(positionAttribute);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
    // Each point is 4 floats (X, Y, Z, confidence); stride is BYTES_PER_POINT.
    GLES20.glVertexAttribPointer(positionAttribute, 4, GLES20.GL_FLOAT, false, BYTES_PER_POINT, 0);
    GLES20.glUniform4f(colorUniform, 31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f);
    GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjection, 0);
    GLES20.glUniform1f(pointSizeUniform, 5.0f);

    GLES20.glDrawArrays(GLES20.GL_POINTS, 0, numPoints);
    // Restore GL state we changed.
    GLES20.glDisableVertexAttribArray(positionAttribute);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "Draw");
  }
}
--------------------------------------------------------------------------------
/app/src/main/java/com/google/ar/core/examples/java/common/rendering/ShaderUtil.java:
--------------------------------------------------------------------------------
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | package com.google.ar.core.examples.java.common.rendering; 17 | 18 | import android.content.Context; 19 | import android.opengl.GLES20; 20 | import android.util.Log; 21 | import java.io.BufferedReader; 22 | import java.io.IOException; 23 | import java.io.InputStream; 24 | import java.io.InputStreamReader; 25 | import java.util.Map; 26 | import java.util.TreeMap; 27 | 28 | /** Shader helper functions. */ 29 | public class ShaderUtil { 30 | /** 31 | * Converts a raw text file, saved as a resource, into an OpenGL ES shader. 32 | * 33 | * @param type The type of shader we will be creating. 34 | * @param filename The filename of the asset file about to be turned into a shader. 35 | * @param defineValuesMap The #define values to add to the top of the shader source code. 36 | * @return The shader object handler. 37 | */ 38 | public static int loadGLShader( 39 | String tag, Context context, int type, String filename, Map defineValuesMap) 40 | throws IOException { 41 | // Load shader source code. 42 | String code = readShaderFileFromAssets(context, filename); 43 | 44 | // Prepend any #define values specified during this run. 45 | String defines = ""; 46 | for (Map.Entry entry : defineValuesMap.entrySet()) { 47 | defines += "#define " + entry.getKey() + " " + entry.getValue() + "\n"; 48 | } 49 | code = defines + code; 50 | 51 | // Compiles shader code. 52 | int shader = GLES20.glCreateShader(type); 53 | GLES20.glShaderSource(shader, code); 54 | GLES20.glCompileShader(shader); 55 | 56 | // Get the compilation status. 57 | final int[] compileStatus = new int[1]; 58 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); 59 | 60 | // If the compilation failed, delete the shader. 
61 | if (compileStatus[0] == 0) { 62 | Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader)); 63 | GLES20.glDeleteShader(shader); 64 | shader = 0; 65 | } 66 | 67 | if (shader == 0) { 68 | throw new RuntimeException("Error creating shader."); 69 | } 70 | 71 | return shader; 72 | } 73 | 74 | /** Overload of loadGLShader that assumes no additional #define values to add. */ 75 | public static int loadGLShader(String tag, Context context, int type, String filename) 76 | throws IOException { 77 | Map emptyDefineValuesMap = new TreeMap<>(); 78 | return loadGLShader(tag, context, type, filename, emptyDefineValuesMap); 79 | } 80 | 81 | /** 82 | * Checks if we've had an error inside of OpenGL ES, and if so what that error is. 83 | * 84 | * @param label Label to report in case of error. 85 | * @throws RuntimeException If an OpenGL error is detected. 86 | */ 87 | public static void checkGLError(String tag, String label) { 88 | int lastError = GLES20.GL_NO_ERROR; 89 | // Drain the queue of all errors. 90 | int error; 91 | while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { 92 | Log.e(tag, label + ": glError " + error); 93 | lastError = error; 94 | } 95 | if (lastError != GLES20.GL_NO_ERROR) { 96 | throw new RuntimeException(label + ": glError " + lastError); 97 | } 98 | } 99 | 100 | /** 101 | * Converts a raw shader file into a string. 102 | * 103 | * @param filename The filename of the shader file about to be turned into a shader. 104 | * @return The context of the text file, or null in case of error. 
105 | */ 106 | private static String readShaderFileFromAssets(Context context, String filename) 107 | throws IOException { 108 | try (InputStream inputStream = context.getAssets().open(filename); 109 | BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) { 110 | StringBuilder sb = new StringBuilder(); 111 | String line; 112 | while ((line = reader.readLine()) != null) { 113 | String[] tokens = line.split(" ", -1); 114 | if (tokens[0].equals("#include")) { 115 | String includeFilename = tokens[1]; 116 | includeFilename = includeFilename.replace("\"", ""); 117 | if (includeFilename.equals(filename)) { 118 | throw new IOException("Do not include the calling file."); 119 | } 120 | sb.append(readShaderFileFromAssets(context, includeFilename)); 121 | } else { 122 | sb.append(line).append("\n"); 123 | } 124 | } 125 | return sb.toString(); 126 | } 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /app/src/main/res/drawable-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/app/src/main/res/drawable-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 16 | 21 | 22 | 27 | 28 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /app/src/main/res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | AugmentedFaces Java 20 | 21 | -------------------------------------------------------------------------------- /app/src/main/res/values/styles.xml: -------------------------------------------------------------------------------- 1 | 16 | 17 | 18 | 22 | 29 
| 30 | 31 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | // Top-level build file where you can add configuration options common to all sub-projects/modules. 2 | 3 | buildscript { 4 | repositories { 5 | google() 6 | jcenter() 7 | } 8 | dependencies { 9 | classpath 'com.android.tools.build:gradle:4.2.1' 10 | // NOTE: Do not place your application dependencies here; they belong 11 | // in the individual module build.gradle files 12 | } 13 | } 14 | 15 | allprojects { 16 | repositories { 17 | google() 18 | jcenter() 19 | mavenLocal() 20 | } 21 | } 22 | 23 | task clean(type: Delete) { 24 | delete rootProject.buildDir 25 | } 26 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | # Project-wide Gradle settings. 2 | 3 | # IDE (e.g. Android Studio) users: 4 | # Gradle settings configured through the IDE *will override* 5 | # any settings specified in this file. 6 | 7 | # For more details on how to configure your build environment visit 8 | # http://www.gradle.org/docs/current/userguide/build_environment.html 9 | 10 | # Specifies the JVM arguments used for the daemon process. 11 | # The setting is particularly useful for tweaking memory settings. 12 | org.gradle.jvmargs=-Xmx1536m 13 | 14 | # When configured, Gradle will run in incubating parallel mode. 15 | # This option should only be used with decoupled projects. More details, visit 16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects 17 | # org.gradle.parallel=true 18 | 19 | # Migrating to AndroidX, for targetSdkVersion 29. 
20 | # For details, see https://developer.android.com/jetpack/androidx/migrate 21 | android.useAndroidX=true 22 | android.enableJetifier=true 23 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MaximeDup/AndroidArcoreFacesStreaming/7d1d47cd1d320f9e2e0037d76280f58b3919555e/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Mon Nov 20 10:27:45 PST 2017 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-all.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! 
-x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? -ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | 
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i + 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 
3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 
50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | include ':app' 2 | --------------------------------------------------------------------------------