├── handtrackinggpu
│   ├── res
│   │   ├── drawable
│   │   │   ├── bmg.jpg
│   │   │   └── img2.jpg
│   │   ├── values
│   │   │   ├── strings.xml
│   │   │   ├── colors.xml
│   │   │   └── styles.xml
│   │   └── layout
│   │       └── activity_main.xml
│   ├── CustomFrameAvailableListner.java
│   ├── AndroidManifest.xml
│   ├── BmpProducer.java
│   ├── BUILD
│   ├── MainActivity.java
│   └── BitmapConverter.java
├── Screenshot_20191231_164913_com.google.mediapipe.apps.handtrackinggpu.jpg
├── README.md
└── .gitignore
/handtrackinggpu/res/drawable/bmg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afsaredrisy/MediapipeHandtracking_GPU_Bitmap_Input/HEAD/handtrackinggpu/res/drawable/bmg.jpg
--------------------------------------------------------------------------------
/handtrackinggpu/res/drawable/img2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afsaredrisy/MediapipeHandtracking_GPU_Bitmap_Input/HEAD/handtrackinggpu/res/drawable/img2.jpg
--------------------------------------------------------------------------------
/Screenshot_20191231_164913_com.google.mediapipe.apps.handtrackinggpu.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afsaredrisy/MediapipeHandtracking_GPU_Bitmap_Input/HEAD/Screenshot_20191231_164913_com.google.mediapipe.apps.handtrackinggpu.jpg
--------------------------------------------------------------------------------
/handtrackinggpu/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <string name="app_name">Hand Tracking GPU</string>
4 |     <string name="no_camera_access">Please grant camera permissions.</string>
5 | </resources>
--------------------------------------------------------------------------------
/handtrackinggpu/CustomFrameAvailableListner.java:
--------------------------------------------------------------------------------
1 | package com.google.mediapipe.apps.handtrackinggpu;
2 |
3 | import android.graphics.Bitmap;
4 |
5 |
6 | /** Callback through which newly produced Bitmap frames are delivered. */
7 | public interface CustomFrameAvailableListner {
8 | void onFrame(Bitmap bitmap);
9 | }
10 |
--------------------------------------------------------------------------------
/handtrackinggpu/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="colorPrimary">#008577</color>
4 |     <color name="colorPrimaryDark">#00574B</color>
5 |     <color name="colorAccent">#D81B60</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # MediapipeHandtracking_GPU_Bitmap_Input
2 | A MediaPipe hand-tracking sample that takes an RGB `Bitmap` as input instead of live camera frames.
3 | ## Demo
4 |
5 | Input Bitmap
6 | 
7 |
8 |
9 | Output
10 | 
11 |
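12 | ## How it works
13 |
14 | `BmpProducer` decodes a drawable into a `Bitmap` and repeatedly pushes it to a
15 | `CustomFrameAvailableListner`; `BitmapConverter` uploads each `Bitmap` as a GL
16 | texture and hands it to MediaPipe's `FrameProcessor`, which runs the hand-tracking
17 | graph. A minimal sketch of the wiring, using the classes in this repo (see
18 | `MainActivity` for the full version):
19 |
20 | ```java
21 | BitmapConverter converter = new BitmapConverter(eglManager.getContext());
22 | converter.setConsumer(processor);                   // FrameProcessor runs the graph
23 | BmpProducer producer = new BmpProducer(this);       // emits Bitmap frames on a thread
24 | producer.setCustomFrameAvailableListner(converter); // Bitmap -> GL texture -> graph
25 | ```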
--------------------------------------------------------------------------------
/handtrackinggpu/res/values/styles.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <!-- Base application theme. -->
4 |     <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
5 |         <item name="colorPrimary">@color/colorPrimary</item>
6 |         <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
7 |         <item name="colorAccent">@color/colorAccent</item>
8 |     </style>
9 | </resources>
--------------------------------------------------------------------------------
/handtrackinggpu/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <androidx.constraintlayout.widget.ConstraintLayout
3 |     xmlns:android="http://schemas.android.com/apk/res/android"
4 |     xmlns:app="http://schemas.android.com/apk/res-auto"
5 |     xmlns:tools="http://schemas.android.com/tools"
6 |     android:layout_width="match_parent"
7 |     android:layout_height="match_parent"
8 |     tools:context=".MainActivity">
9 |
10 |     <FrameLayout
11 |         android:id="@+id/preview_display_layout"
12 |         android:layout_width="match_parent"
13 |         android:layout_height="match_parent">
14 |         <TextView
15 |             android:id="@+id/no_camera_access_view"
16 |             android:layout_width="match_parent"
17 |             android:layout_height="match_parent"
18 |             android:gravity="center"
19 |             android:text="@string/no_camera_access" />
20 |     </FrameLayout>
21 | </androidx.constraintlayout.widget.ConstraintLayout>
--------------------------------------------------------------------------------
/handtrackinggpu/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 |     package="com.google.mediapipe.apps.handtrackinggpu">
4 |
5 |     <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="27" />
6 |
7 |     <!-- For camera access and MediaPipe GPU support -->
8 |     <uses-permission android:name="android.permission.CAMERA" />
9 |     <uses-feature android:name="android.hardware.camera" />
10 |     <uses-feature android:glEsVersion="0x00020000" android:required="true" />
11 |
12 |     <application
13 |         android:allowBackup="true"
14 |         android:label="@string/app_name"
15 |         android:supportsRtl="true"
16 |         android:theme="@style/AppTheme">
17 |         <activity
18 |             android:name=".MainActivity"
19 |             android:screenOrientation="portrait">
20 |             <intent-filter>
21 |                 <action android:name="android.intent.action.MAIN" />
22 |                 <category android:name="android.intent.category.LAUNCHER" />
23 |             </intent-filter>
24 |         </activity>
25 |     </application>
26 |
27 | </manifest>
--------------------------------------------------------------------------------
/handtrackinggpu/BmpProducer.java:
--------------------------------------------------------------------------------
1 | package com.google.mediapipe.apps.handtrackinggpu;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.util.Log;
7 |
8 |
9 |
10 |
11 | public class BmpProducer extends Thread {
12 |
13 | CustomFrameAvailableListner customFrameAvailableListner;
14 |
15 | public int width = 513, height = 513; // overwritten with the scaled bitmap's size in the constructor
16 | Bitmap bmp;
17 |
18 | BmpProducer(Context context){
19 | bmp = BitmapFactory.decodeResource(context.getResources(), R.drawable.img2);
20 | bmp = Bitmap.createScaledBitmap(bmp,480,640,true);
21 | height = bmp.getHeight();
22 | width = bmp.getWidth();
23 | start();
24 | }
25 |
26 | public void setCustomFrameAvailableListner(CustomFrameAvailableListner customFrameAvailableListner){
27 | this.customFrameAvailableListner = customFrameAvailableListner;
28 | }
29 |
30 | public static final String TAG = "BmpProducer";
31 |
32 | @Override
33 | public void run() {
34 | while (!isInterrupted()) {
35 | if (bmp == null || customFrameAvailableListner == null)
36 | continue;
37 | Log.d(TAG, "Writing frame");
38 | customFrameAvailableListner.onFrame(bmp);
39 | try {
40 | Thread.sleep(10); // throttle to roughly 100 frames per second
41 | } catch (InterruptedException e) {
42 | Log.d(TAG, e.toString());
43 | return;
44 | }
45 | }
46 | }
47 | }
48 |
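49 | // Usage (see MainActivity.startProducer()): construct with a Context, then attach
50 | // the converter as the frame listener:
51 | //   BmpProducer producer = new BmpProducer(this);
52 | //   producer.setCustomFrameAvailableListner(converter);
53 | // Swap R.drawable.img2 for any other Bitmap source to track hands in your own images.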
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.aar
4 | *.ap_
5 | *.aab
6 |
7 | # Files for the ART/Dalvik VM
8 | *.dex
9 |
10 | # Java class files
11 | *.class
12 |
13 | # Generated files
14 | bin/
15 | gen/
16 | out/
17 | # Uncomment the following line in case you need and you don't have the release build type files in your app
18 | # release/
19 |
20 | # Gradle files
21 | .gradle/
22 | build/
23 |
24 | # Local configuration file (sdk path, etc)
25 | local.properties
26 |
27 | # Proguard folder generated by Eclipse
28 | proguard/
29 |
30 | # Log Files
31 | *.log
32 |
33 | # Android Studio Navigation editor temp files
34 | .navigation/
35 |
36 | # Android Studio captures folder
37 | captures/
38 |
39 | # IntelliJ
40 | *.iml
41 | .idea/workspace.xml
42 | .idea/tasks.xml
43 | .idea/gradle.xml
44 | .idea/assetWizardSettings.xml
45 | .idea/dictionaries
46 | .idea/libraries
47 | # Android Studio 3 in .gitignore file.
48 | .idea/caches
49 | .idea/modules.xml
50 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you
51 | .idea/navEditor.xml
52 |
53 | # Keystore files
54 | # Uncomment the following lines if you do not want to check your keystore files in.
55 | #*.jks
56 | #*.keystore
57 |
58 | # External native build folder generated in Android Studio 2.2 and later
59 | .externalNativeBuild
60 | .cxx/
61 |
62 | # Google Services (e.g. APIs or Firebase)
63 | # google-services.json
64 |
65 | # Freeline
66 | freeline.py
67 | freeline/
68 | freeline_project_description.json
69 |
70 | # fastlane
71 | fastlane/report.xml
72 | fastlane/Preview.html
73 | fastlane/screenshots
74 | fastlane/test_output
75 | fastlane/readme.md
76 |
77 | # Version control
78 | vcs.xml
79 |
80 | # lint
81 | lint/intermediates/
82 | lint/generated/
83 | lint/outputs/
84 | lint/tmp/
85 | # lint/reports/
86 |
--------------------------------------------------------------------------------
/handtrackinggpu/BUILD:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The MediaPipe Authors.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | licenses(["notice"]) # Apache 2.0
16 |
17 | package(default_visibility = ["//visibility:private"])
18 |
19 | cc_binary(
20 | name = "libmediapipe_jni.so",
21 | linkshared = 1,
22 | linkstatic = 1,
23 | deps = [
24 | "//mediapipe/graphs/hand_tracking:mobile_calculators",
25 | "//mediapipe/java/com/google/mediapipe/framework/jni:mediapipe_framework_jni",
26 | ],
27 | )
28 |
29 | cc_library(
30 | name = "mediapipe_jni_lib",
31 | srcs = [":libmediapipe_jni.so"],
32 | alwayslink = 1,
33 | )
34 |
35 | # Maps the binary graph to an alias (e.g., the app name) for convenience so that the alias can be
36 | # easily incorporated into the app via, for example,
37 | # MainActivity.BINARY_GRAPH_NAME = "appname.binarypb".
38 | genrule(
39 | name = "binary_graph",
40 | srcs = ["//mediapipe/graphs/hand_tracking:hand_tracking_mobile_gpu_binary_graph"],
41 | outs = ["handtrackinggpu.binarypb"],
42 | cmd = "cp $< $@",
43 | )
44 |
45 | # To use the 3D hand landmark model instead of the default 2D model, add "--define 3D=true"
46 | # to the bazel build command (see the example invocation at the end of this file).
47 | config_setting(
48 | name = "use_3d_model",
49 | define_values = {
50 | "3D": "true",
51 | },
52 | )
53 |
54 | genrule(
55 | name = "model",
56 | srcs = select({
57 | "//conditions:default": ["//mediapipe/models:hand_landmark.tflite"],
58 | ":use_3d_model": ["//mediapipe/models:hand_landmark_3d.tflite"],
59 | }),
60 | outs = ["hand_landmark.tflite"],
61 | cmd = "cp $< $@",
62 | )
63 |
64 | android_library(
65 | name = "mediapipe_lib",
66 | srcs = glob(["*.java"]),
67 | assets = [
68 | ":binary_graph",
69 | ":model",
70 | "//mediapipe/models:palm_detection.tflite",
71 | "//mediapipe/models:palm_detection_labelmap.txt",
72 | ],
73 | assets_dir = "",
74 | manifest = "AndroidManifest.xml",
75 | resource_files = glob(["res/**"]),
76 | deps = [
77 | ":mediapipe_jni_lib",
78 | "//mediapipe/framework/formats:landmark_java_proto_lite",
79 | "//mediapipe/java/com/google/mediapipe/components:android_camerax_helper",
80 | "//mediapipe/java/com/google/mediapipe/components:android_components",
81 | "//mediapipe/java/com/google/mediapipe/framework:android_framework",
82 | "//mediapipe/java/com/google/mediapipe/glutil",
83 | "//third_party:androidx_appcompat",
84 | "//third_party:androidx_constraint_layout",
85 | "//third_party:androidx_legacy_support_v4",
86 | "//third_party:androidx_material",
87 | "//third_party:androidx_recyclerview",
88 | "//third_party:opencv",
89 | "@androidx_concurrent_futures//jar",
90 | "@androidx_lifecycle//jar",
91 | "@com_google_code_findbugs//jar",
92 | "@com_google_guava_android//jar",
93 | ],
94 | )
95 |
96 | android_binary(
97 | name = "handtrackinggpu",
98 | manifest = "AndroidManifest.xml",
99 | manifest_values = {"applicationId": "com.google.mediapipe.apps.handtrackinggpu"},
100 | multidex = "native",
101 | deps = [
102 | ":mediapipe_lib",
103 | ],
104 | )
105 |
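106 | # Example build invocation (the package path below is assumed; adjust it to this
107 | # directory's location in your MediaPipe checkout):
108 | #   bazel build -c opt --config=android_arm64 \
109 | #     //mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu:handtrackinggpu
110 | # Add --define 3D=true to build with the 3D hand landmark model.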
--------------------------------------------------------------------------------
/handtrackinggpu/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.google.mediapipe.apps.handtrackinggpu;
2 |
3 | import android.graphics.SurfaceTexture;
4 | import android.os.Bundle;
5 | import androidx.appcompat.app.AppCompatActivity;
6 | import android.util.Log;
7 | import android.view.SurfaceHolder;
8 | import android.view.SurfaceView;
9 | import android.view.View;
10 | import android.view.ViewGroup;
11 | import com.google.mediapipe.formats.proto.LandmarkProto.NormalizedLandmark;
12 | import com.google.mediapipe.formats.proto.LandmarkProto.NormalizedLandmarkList;
13 | import com.google.mediapipe.components.CameraHelper;
14 | import com.google.mediapipe.components.FrameProcessor;
15 | import com.google.mediapipe.components.PermissionHelper;
16 | import com.google.mediapipe.framework.AndroidAssetUtil;
17 | import com.google.mediapipe.framework.Packet;
18 | import com.google.mediapipe.framework.PacketCallback;
19 | import com.google.mediapipe.framework.PacketGetter;
20 | import com.google.mediapipe.glutil.EglManager;
21 |
22 |
23 | /** Main activity: feeds Bitmap frames into the MediaPipe hand-tracking graph on GPU. */
24 | public class MainActivity extends AppCompatActivity {
25 | private static final String TAG = "MainActivity";
26 |
27 | private static final String BINARY_GRAPH_NAME = "handtrackinggpu.binarypb";
28 | private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
29 | private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
30 | private static final String OUTPUT_HAND_PRESENCE_STREAM_NAME = "hand_presence";
31 | private static final String OUTPUT_LANDMARKS_STREAM_NAME = "hand_landmarks";
32 | private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
33 |
34 |
35 | private static final boolean FLIP_FRAMES_VERTICALLY = true;
36 |
37 | static {
38 | // Load all native libraries needed by the app.
39 | System.loadLibrary("mediapipe_jni");
40 | System.loadLibrary("opencv_java3");
41 | }
42 |
43 | // {@link SurfaceTexture} where the camera-preview frames can be accessed.
44 | private SurfaceTexture previewFrameTexture;
45 | // {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph.
46 | private SurfaceView previewDisplayView;
47 |
48 | // Creates and manages an {@link EGLContext}.
49 | private EglManager eglManager;
50 | // Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed
51 | // frames onto a {@link Surface}.
52 | private FrameProcessor processor;
53 | // Converts each incoming Bitmap into a regular GL texture to be
54 | // consumed by {@link FrameProcessor} and the underlying MediaPipe graph.
55 | private BitmapConverter converter;
56 |
57 | // Produces Bitmap frames in place of the CameraX camera feed used by the
58 | // stock camera-input example.
59 | BmpProducer bitmapProducer;
60 |
61 | @Override
62 | protected void onCreate(Bundle savedInstanceState) {
63 | super.onCreate(savedInstanceState);
64 | setContentView(R.layout.activity_main);
65 | previewDisplayView = new SurfaceView(this);
66 | setupPreviewDisplayView();
67 |
68 | // Initialize asset manager so that MediaPipe native libraries can access the app assets, e.g.,
69 | // binary graphs.
70 | AndroidAssetUtil.initializeNativeAssetManager(this);
71 |
72 | eglManager = new EglManager(null);
73 | processor =
74 | new FrameProcessor(
75 | this,
76 | eglManager.getNativeContext(),
77 | BINARY_GRAPH_NAME,
78 | INPUT_VIDEO_STREAM_NAME,
79 | OUTPUT_VIDEO_STREAM_NAME);
80 | processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
81 |
82 | /* processor.addPacketCallback(
83 | OUTPUT_HAND_PRESENCE_STREAM_NAME,
84 | (packet) -> {
85 | Boolean handPresence = PacketGetter.getBool(packet);
86 | if (!handPresence) {
87 | Log.d(
88 | TAG,
89 | "[TS:" + packet.getTimestamp() + "] Hand presence is false, no hands detected.");
90 | }
91 | });
92 |
93 |
94 | processor.addPacketCallback(
95 | OUTPUT_LANDMARKS_STREAM_NAME,
96 | (packet) -> {
97 | byte[] landmarksRaw = PacketGetter.getProtoBytes(packet);
98 | try {
99 | NormalizedLandmarkList landmarks = NormalizedLandmarkList.parseFrom(landmarksRaw);
100 | if (landmarks == null) {
101 | Log.d(TAG, "[TS:" + packet.getTimestamp() + "] No hand landmarks.");
102 | return;
103 | }
104 | // Note: If hand_presence is false, these landmarks are useless.
105 | Log.d(
106 | TAG,
107 | "[TS:"
108 | + packet.getTimestamp()
109 | + "] #Landmarks for hand: "
110 | + landmarks.getLandmarkCount());
111 | Log.d(TAG, getLandmarksDebugString(landmarks));
112 | } catch (Exception e) {
113 | Log.e(TAG, "Exception while parsing landmarks: " + e);
114 | return;
115 | }
116 | });*/
117 |
118 | PermissionHelper.checkAndRequestCameraPermissions(this);
119 | }
120 |
121 | @Override
122 | protected void onResume() {
123 | super.onResume();
124 | converter = new BitmapConverter(eglManager.getContext());
125 | //converter.setFlipY(FLIP_FRAMES_VERTICALLY);
126 | converter.setConsumer(processor);
127 | startProducer();
128 | }
129 |
130 | @Override
131 | protected void onPause() {
132 | super.onPause();
133 | converter.close();
134 | if (bitmapProducer != null) bitmapProducer.interrupt(); // stop the producer thread; onResume() starts a new one
135 | }
136 | @Override
137 | public void onRequestPermissionsResult(
138 | int requestCode, String[] permissions, int[] grantResults) {
139 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
140 | PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
141 | }
142 |
143 | private void setupPreviewDisplayView() {
144 | previewDisplayView.setVisibility(View.GONE);
145 | ViewGroup viewGroup = findViewById(R.id.preview_display_layout);
146 | viewGroup.addView(previewDisplayView);
147 |
148 | previewDisplayView
149 | .getHolder()
150 | .addCallback(
151 | new SurfaceHolder.Callback() {
152 | @Override
153 | public void surfaceCreated(SurfaceHolder holder) {
154 | processor.getVideoSurfaceOutput().setSurface(holder.getSurface());
155 | }
156 |
157 | @Override
158 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
159 | bitmapProducer.setCustomFrameAvailableListner(converter);
160 | }
161 |
162 | @Override
163 | public void surfaceDestroyed(SurfaceHolder holder) {
164 | processor.getVideoSurfaceOutput().setSurface(null);
165 | }
166 | });
167 | }
168 |
169 |
170 |
171 | private void startProducer(){
172 | bitmapProducer = new BmpProducer(this);
173 | previewDisplayView.setVisibility(View.VISIBLE);
174 | }
175 |
176 | private static String getLandmarksDebugString(NormalizedLandmarkList landmarks) {
177 | int landmarkIndex = 0;
178 | String landmarksString = "";
179 | for (NormalizedLandmark landmark : landmarks.getLandmarkList()) {
180 | landmarksString +=
181 | "\t\tLandmark["
182 | + landmarkIndex
183 | + "]: ("
184 | + landmark.getX()
185 | + ", "
186 | + landmark.getY()
187 | + ", "
188 | + landmark.getZ()
189 | + ")\n";
190 | ++landmarkIndex;
191 | }
192 | return landmarksString;
193 | }
194 | }
195 |
196 |
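197 | // Note: the two processor.addPacketCallback(...) blocks in onCreate() are left
198 | // commented out; re-enable them to log hand presence and the landmark list
199 | // (formatted by getLandmarksDebugString) for each processed frame.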
--------------------------------------------------------------------------------
/handtrackinggpu/BitmapConverter.java:
--------------------------------------------------------------------------------
1 | package com.google.mediapipe.apps.handtrackinggpu;
2 |
3 | import android.graphics.Bitmap;
4 | import android.opengl.GLES20;
5 | import android.util.Log;
6 |
7 | import androidx.annotation.Nullable;
8 |
9 | import com.google.mediapipe.components.TextureFrameConsumer;
10 | import com.google.mediapipe.components.TextureFrameProducer;
11 | import com.google.mediapipe.framework.AppTextureFrame;
12 | import com.google.mediapipe.glutil.GlThread;
13 | import com.google.mediapipe.glutil.ShaderUtil;
14 |
15 | import java.util.ArrayList;
16 | import java.util.Collections;
17 | import java.util.List;
18 |
19 | import javax.microedition.khronos.egl.EGLContext;
20 |
21 | public class BitmapConverter implements TextureFrameProducer, CustomFrameAvailableListner {
22 |
23 | private static final String TAG = "BitmapConverter";
24 | private static final int DEFAULT_NUM_BUFFERS = 2;
25 | private static final String THREAD_NAME = "BitmapConverter";
26 |
27 | private BitmapConverter.RenderThread thread;
28 | @Override
29 | public void setConsumer(TextureFrameConsumer next) {
30 | thread.setConsumer(next);
31 | }
32 |
33 | public void addConsumer(TextureFrameConsumer consumer) {
34 | thread.addConsumer(consumer);
35 | }
36 |
37 | public void removeConsumer(TextureFrameConsumer consumer) {
38 | thread.removeConsumer(consumer);
39 | }
40 |
41 | public BitmapConverter(EGLContext parentContext, int numBuffers){
42 | thread = new RenderThread(parentContext, numBuffers);
43 | thread.setName(THREAD_NAME);
44 | thread.start();
45 | try {
46 | thread.waitUntilReady();
47 | } catch (InterruptedException ie) {
48 | // Someone interrupted our thread. This is not supposed to happen: we own
49 | // the thread, and we are not going to interrupt it. Therefore, it is not
50 | // reasonable for this constructor to throw an InterruptedException
51 | // (which is a checked exception). If it should somehow happen that the
52 | // thread is interrupted, let's set the interrupted flag again, log the
53 | // error, and throw a RuntimeException.
54 | Thread.currentThread().interrupt();
55 | Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
56 | throw new RuntimeException(ie);
57 | }
58 | }
59 | public void setTimestampOffsetNanos(long offsetInNanos) {
60 | thread.setTimestampOffsetNanos(offsetInNanos);
61 | }
62 | public BitmapConverter(EGLContext parentContext) {
63 | this(parentContext, DEFAULT_NUM_BUFFERS);
64 | }
65 |
66 | public void close() {
67 | if (thread == null) {
68 | return;
69 | }
70 | //thread.getHandler().post(() -> thread.setSurfaceTexture(null, 0, 0));
71 | thread.quitSafely();
72 | try {
73 | thread.join();
74 | } catch (InterruptedException ie) {
75 | // Set the interrupted flag again, log the error, and throw a RuntimeException.
76 | Thread.currentThread().interrupt();
77 | Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
78 | throw new RuntimeException(ie);
79 | }
80 | }
81 |
82 | @Override
83 | public void onFrame(Bitmap bitmap) {
84 | thread.onFrame(bitmap);
85 | }
86 |
87 |
88 | private static class RenderThread extends GlThread implements CustomFrameAvailableListner {
89 | private static final long NANOS_PER_MICRO = 1000; // Nanoseconds in one microsecond.
90 | private final List<TextureFrameConsumer> consumers;
91 | private List<AppTextureFrame> outputFrames = null;
92 | private int outputFrameIndex = -1;
93 | private long nextFrameTimestampOffset = 0;
94 | private long timestampOffsetNanos = 0;
95 | private long previousTimestamp = 0;
96 | private Bitmap bitmap;
97 | private boolean previousTimestampValid = false;
98 |
99 | protected int destinationWidth = 0;
100 | protected int destinationHeight = 0;
101 | public RenderThread(@Nullable Object parentContext, int numBuffers) {
102 | super(parentContext);
103 | outputFrames = new ArrayList<>();
104 | outputFrames.addAll(Collections.nCopies(numBuffers, null));
105 | consumers = new ArrayList<>();
106 | }
107 | public void setConsumer(TextureFrameConsumer consumer) {
108 | synchronized (consumers) {
109 | consumers.clear();
110 | consumers.add(consumer);
111 | }
112 | }
113 |
114 | public void addConsumer(TextureFrameConsumer consumer) {
115 | synchronized (consumers) {
116 | consumers.add(consumer);
117 | }
118 | }
119 |
120 | public void removeConsumer(TextureFrameConsumer consumer) {
121 | synchronized (consumers) {
122 | consumers.remove(consumer);
123 | }
124 | }
125 |
126 | @Override
127 | public void prepareGl() {
128 | super.prepareGl();
129 |
130 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
131 |
132 | //renderer.setup();
133 | }
134 |
135 | @Override
136 | public void releaseGl() {
137 | for (int i = 0; i < outputFrames.size(); ++i) {
138 | teardownDestination(i);
139 | }
140 | //renderer.release();
141 | super.releaseGl(); // This releases the EGL context, so must do it after any GL calls.
142 | }
143 |
144 | public void setTimestampOffsetNanos(long offsetInNanos) {
145 | timestampOffsetNanos = offsetInNanos;
146 | }
147 |
148 | private void teardownDestination(int index) {
149 | if (outputFrames.get(index) != null) {
150 | waitUntilReleased(outputFrames.get(index));
151 | GLES20.glDeleteTextures(1, new int[] {outputFrames.get(index).getTextureName()}, 0);
152 | outputFrames.set(index, null);
153 | }
154 | }
155 |
156 | private void setupDestination(int index, int destinationTextureId) {
157 | teardownDestination(index);
158 | outputFrames.set(
159 | index, new AppTextureFrame(destinationTextureId, destinationWidth, destinationHeight));
160 |
161 | }
162 |
163 | @Override
164 | public void onFrame(Bitmap bitmap) {
165 | Log.d(TAG,"New Frame");
166 | this.bitmap = bitmap;
167 |
168 | handler.post(() -> renderNext());
169 | }
170 |
171 | protected void renderNext() {
172 | if (bitmap == null) {
173 | return;
174 | }
175 | try {
176 | synchronized (consumers) {
177 | boolean frameUpdated = false;
178 | for (TextureFrameConsumer consumer : consumers) {
179 | AppTextureFrame outputFrame = nextOutputFrame(bitmap);
180 | updateOutputFrame(outputFrame);
181 | frameUpdated = true;
182 | Log.d(TAG,"Frame updated ");
183 | if (consumer != null) {
184 | if (Log.isLoggable(TAG, Log.VERBOSE)) {
185 | Log.v(
186 | TAG,
187 | String.format(
188 | "Locking tex: %d width: %d height: %d",
189 | outputFrame.getTextureName(),
190 | outputFrame.getWidth(),
191 | outputFrame.getHeight()));
192 | }
193 | outputFrame.setInUse();
194 | Log.d(TAG,"Frame sending to consumer");
195 | consumer.onNewFrame(outputFrame);
196 | }
197 | }
198 | if (!frameUpdated) { // Need to update the frame even if there are no consumers.
199 | AppTextureFrame outputFrame = nextOutputFrame(bitmap);
200 | updateOutputFrame(outputFrame);
201 | }
202 | }
203 | } finally {
204 | //bitmap.recycle();
205 | }
206 | }
207 |
208 |
209 | /**
210 | * NOTE: must be invoked on GL thread
211 | */
212 | private AppTextureFrame nextOutputFrame(Bitmap bitmap) {
213 | int textureName = ShaderUtil.createRgbaTexture(bitmap);
214 | outputFrameIndex = (outputFrameIndex + 1) % outputFrames.size();
215 | destinationHeight = bitmap.getHeight();
216 | destinationWidth = bitmap.getWidth();
217 | setupDestination(outputFrameIndex, textureName);
218 | AppTextureFrame outputFrame = outputFrames.get(outputFrameIndex);
219 | waitUntilReleased(outputFrame);
220 | return outputFrame;
221 | }
222 | private long timestamp = 1L;
223 | private void updateOutputFrame(AppTextureFrame outputFrame) {
224 | // Derive the frame timestamp from a monotonic counter (no surface texture
225 | // supplies timestamps here), and adjust |nextFrameTimestampOffset| to ensure
226 | // that output timestamps increase strictly monotonically.
227 | timestamp = timestamp + 1;
228 | long textureTimestamp =
229 | (timestamp + timestampOffsetNanos) / NANOS_PER_MICRO;
230 | if (previousTimestampValid
231 | && textureTimestamp + nextFrameTimestampOffset <= previousTimestamp) {
232 | nextFrameTimestampOffset = previousTimestamp + 1 - textureTimestamp;
233 | }
234 | outputFrame.setTimestamp(textureTimestamp + nextFrameTimestampOffset);
235 | previousTimestamp = outputFrame.getTimestamp();
236 | previousTimestampValid = true;
237 | }
238 |
239 | private void waitUntilReleased(AppTextureFrame frame) {
240 | try {
241 | if (Log.isLoggable(TAG, Log.VERBOSE)) {
242 | Log.v(
243 | TAG,
244 | String.format(
245 | "Waiting for tex: %d width: %d height: %d",
246 | frame.getTextureName(), frame.getWidth(), frame.getHeight()));
247 | }
248 | frame.waitUntilReleased();
249 | if (Log.isLoggable(TAG, Log.VERBOSE)) {
250 | Log.v(
251 | TAG,
252 | String.format(
253 | "Finished waiting for tex: %d width: %d height: %d",
254 | frame.getTextureName(), frame.getWidth(), frame.getHeight()));
255 | }
256 | } catch (InterruptedException ie) {
257 | // Someone interrupted our thread. This is not supposed to happen: we own
258 | // the thread, and we are not going to interrupt it. If it should somehow
259 | // happen that the thread is interrupted, let's set the interrupted flag
260 | // again, log the error, and throw a RuntimeException.
261 | Thread.currentThread().interrupt();
262 | Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
263 | throw new RuntimeException(ie);
264 | }
265 | }
266 | }
267 |
268 | }
269 |
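270 | // Typical usage (mirrors MainActivity): create the converter on the shared EGL
271 | // context, point it at the FrameProcessor, and feed it Bitmaps via onFrame():
272 | //   BitmapConverter converter = new BitmapConverter(eglManager.getContext());
273 | //   converter.setConsumer(processor);
274 | //   bmpProducer.setCustomFrameAvailableListner(converter);
275 | // Call close() (e.g. from onPause()) to shut down the render thread.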
--------------------------------------------------------------------------------