Fixed model loading issue; moved GLSurfaceView setup into the library

This commit is contained in:
Mautisim Munir 2022-10-15 09:50:38 +05:00
parent 016bbe47e6
commit 1c3fc550e9
8 changed files with 11 additions and 180 deletions

View File

@ -39,7 +39,7 @@ import com.google.mediapipe.solutioncore.VideoInput;
import com.google.mediapipe.solutions.posetracking.PoseTracking; import com.google.mediapipe.solutions.posetracking.PoseTracking;
import com.google.mediapipe.solutions.posetracking.PoseTrackingOptions; import com.google.mediapipe.solutions.posetracking.PoseTrackingOptions;
import com.google.mediapipe.solutions.posetracking.PoseTrackingResult; import com.google.mediapipe.solutions.posetracking.PoseTrackingResult;
//import com.google.mediapipe.solutions.posetracking.FaceKeypoint; import com.google.mediapipe.solutions.posetracking.PoseTrackingResultGlRenderer;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;

View File

@ -1,156 +0,0 @@
// Copyright 2021 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.examples.posetracking_camera;
import android.opengl.GLES20;
import com.google.mediapipe.formats.proto.DetectionProto.Detection;
import com.google.mediapipe.solutioncore.ResultGlRenderer;
import com.google.mediapipe.solutions.posetracking.PoseTrackingResult;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
;
/** A custom implementation of {@link ResultGlRenderer} to render {@link PoseTrackingResult}. */
public class PoseTrackingResultGlRenderer implements ResultGlRenderer<PoseTrackingResult> {
private static final String TAG = "PoseTrackingResultGlRenderer";
private static final float[] KEYPOINT_COLOR = new float[] {1f, 0f, 0f, 1f};
private static final float KEYPOINT_SIZE = 16f;
private static final float[] BBOX_COLOR = new float[] {0f, 1f, 0f, 1f};
private static final int BBOX_THICKNESS = 8;
private static final String VERTEX_SHADER =
"uniform mat4 uProjectionMatrix;\n"
+ "uniform float uPointSize;\n"
+ "attribute vec4 vPosition;\n"
+ "void main() {\n"
+ " gl_Position = uProjectionMatrix * vPosition;\n"
+ " gl_PointSize = uPointSize;"
+ "}";
private static final String FRAGMENT_SHADER =
"precision mediump float;\n"
+ "uniform vec4 uColor;\n"
+ "void main() {\n"
+ " gl_FragColor = uColor;\n"
+ "}";
private int program;
private int positionHandle;
private int pointSizeHandle;
private int projectionMatrixHandle;
private int colorHandle;
private int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
@Override
public void setupRendering() {
program = GLES20.glCreateProgram();
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
positionHandle = GLES20.glGetAttribLocation(program, "vPosition");
pointSizeHandle = GLES20.glGetUniformLocation(program, "uPointSize");
projectionMatrixHandle = GLES20.glGetUniformLocation(program, "uProjectionMatrix");
colorHandle = GLES20.glGetUniformLocation(program, "uColor");
}
/**
* No needed anymore to be deleted
* **/
@Override
public void renderResult(PoseTrackingResult result, float[] projectionMatrix) {
if (result == null) {
return;
}
GLES20.glUseProgram(program);
GLES20.glUniformMatrix4fv(projectionMatrixHandle, 1, false, projectionMatrix, 0);
// GLES20.glUniform1f(pointSizeHandle, KEYPOINT_SIZE);
// int numDetectedFaces = result.multiPoseTrackings().size();
// for (int i = 0; i < numDetectedFaces; ++i) {
// drawDetection(result.multiPoseTrackings().get(i));
// }
}
/**
* Deletes the shader program.
*
* <p>This is only necessary if one wants to release the program while keeping the context around.
*/
public void release() {
GLES20.glDeleteProgram(program);
}
/**
* Not needed anymore, to be cleaned
* */
private void drawDetection(Detection detection) {
if (!detection.hasLocationData()) {
return;
}
// Draw keypoints.
// float[] points = new float[FaceKeypoint.NUM_KEY_POINTS * 2];
// for (int i = 0; i < FaceKeypoint.NUM_KEY_POINTS; ++i) {
// points[2 * i] = detection.getLocationData().getRelativeKeypoints(i).getX();
// points[2 * i + 1] = detection.getLocationData().getRelativeKeypoints(i).getY();
// }
// GLES20.glUniform4fv(colorHandle, 1, KEYPOINT_COLOR, 0);
// FloatBuffer vertexBuffer =
// ByteBuffer.allocateDirect(points.length * 4)
// .order(ByteOrder.nativeOrder())
// .asFloatBuffer()
// .put(points);
// vertexBuffer.position(0);
// GLES20.glEnableVertexAttribArray(positionHandle);
// GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
// GLES20.glDrawArrays(GLES20.GL_POINTS, 0, FaceKeypoint.NUM_KEY_POINTS);
if (!detection.getLocationData().hasRelativeBoundingBox()) {
return;
}
// Draw bounding box.
// float left = detection.getLocationData().getRelativeBoundingBox().getXmin();
// float top = detection.getLocationData().getRelativeBoundingBox().getYmin();
// float right = left + detection.getLocationData().getRelativeBoundingBox().getWidth();
// float bottom = top + detection.getLocationData().getRelativeBoundingBox().getHeight();
// drawLine(top, left, top, right);
// drawLine(bottom, left, bottom, right);
// drawLine(top, left, bottom, left);
// drawLine(top, right, bottom, right);
}
private void drawLine(float y1, float x1, float y2, float x2) {
GLES20.glUniform4fv(colorHandle, 1, BBOX_COLOR, 0);
GLES20.glLineWidth(BBOX_THICKNESS);
float[] vertex = {x1, y1, x2, y2};
FloatBuffer vertexBuffer =
ByteBuffer.allocateDirect(vertex.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertex);
vertexBuffer.position(0);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
}
}

View File

@ -14,11 +14,12 @@
<!-- For logging solution events --> <!-- For logging solution events -->
<uses-permission android:name="android.permission.INTERNET" /> <uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" /> <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
<application <application
android:allowBackup="true" android:allowBackup="true"
android:icon="@mipmap/ic_launcher" android:icon="@mipmap/ic_launcher"
android:label="CopperLabs PoseTracking" android:label="CopperLabs Lindera"
android:roundIcon="@mipmap/ic_launcher_round" android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/AppTheme"> android:theme="@style/AppTheme">

View File

@ -39,6 +39,7 @@ android_binary(
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-api", "//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-api",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-detection", "//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-detection",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-graph", "//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-graph",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-landmark",
"//third_party:androidx_appcompat", "//third_party:androidx_appcompat",
"//third_party:androidx_constraint_layout", "//third_party:androidx_constraint_layout",
"//third_party:opencv", "//third_party:opencv",

View File

@ -29,6 +29,7 @@ import android.widget.FrameLayout;
import androidx.activity.result.ActivityResultLauncher; import androidx.activity.result.ActivityResultLauncher;
import com.google.mediapipe.solutioncore.SolutionGlSurfaceView;
import com.google.mediapipe.solutions.posetracking.ComputerVisionPlugin; import com.google.mediapipe.solutions.posetracking.ComputerVisionPlugin;
import com.google.mediapipe.solutions.posetracking.Lindera; import com.google.mediapipe.solutions.posetracking.Lindera;
import com.google.mediapipe.solutions.posetracking.PoseTrackingResultGlRenderer; import com.google.mediapipe.solutions.posetracking.PoseTrackingResultGlRenderer;
@ -46,6 +47,7 @@ public class MainActivity extends AppCompatActivity {
// Live camera demo UI and camera components. // Live camera demo UI and camera components.
@Override @Override
protected void onCreate(Bundle savedInstanceState) { protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
@ -56,10 +58,8 @@ public class MainActivity extends AppCompatActivity {
disableRedundantUI(); disableRedundantUI();
setupLiveDemoUiComponents(); setupLiveDemoUiComponents();
plugin = new ComputerVisionPluginImpl(); plugin = new ComputerVisionPluginImpl();
lindera = new Lindera(plugin); lindera = new Lindera(plugin);
new PoseTrackingResultGlRenderer();
} }
@ -75,23 +75,6 @@ public class MainActivity extends AppCompatActivity {
lindera.initialize(frameLayout, MainActivity.this); lindera.initialize(frameLayout, MainActivity.this);
ExecutorService executor = Executors.newSingleThreadExecutor();
Handler handler = new Handler(Looper.getMainLooper());
executor.execute(new Runnable() {
@Override
public void run() {
//Background work here
handler.post(new Runnable() {
@Override
public void run() {
//UI Thread work here
}
});
}
});
}); });
} }
/**Disables unnecessary UI buttons*/ /**Disables unnecessary UI buttons*/

View File

@ -4,5 +4,5 @@
<uses-sdk android:minSdkVersion="21" <uses-sdk android:minSdkVersion="21"
android:targetSdkVersion="27" /> android:targetSdkVersion="27" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
</manifest> </manifest>

View File

@ -24,7 +24,9 @@ public class Lindera {
} }
public void initialize (ViewGroup computerVisionContainerView , AppCompatActivity appCompatActivity){ public void initialize (ViewGroup computerVisionContainerView , AppCompatActivity appCompatActivity){
setupStreamingModePipeline(computerVisionContainerView,appCompatActivity); setupStreamingModePipeline(computerVisionContainerView,appCompatActivity);
} }
/** Sets up core workflow for streaming mode. */ /** Sets up core workflow for streaming mode. */
private void setupStreamingModePipeline(ViewGroup computerVisionContainerView,AppCompatActivity appCompatActivity) { private void setupStreamingModePipeline(ViewGroup computerVisionContainerView,AppCompatActivity appCompatActivity) {

View File

@ -23,7 +23,7 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
import java.nio.FloatBuffer; import java.nio.FloatBuffer;
;
/** A custom implementation of {@link ResultGlRenderer} to render {@link PoseTrackingResult}. */ /** A custom implementation of {@link ResultGlRenderer} to render {@link PoseTrackingResult}. */
public class PoseTrackingResultGlRenderer implements ResultGlRenderer<PoseTrackingResult> { public class PoseTrackingResultGlRenderer implements ResultGlRenderer<PoseTrackingResult> {