Added simple posetracking camera example

This commit is contained in:
Mautisim Munir 2022-10-12 00:41:15 +05:00
parent 9b54cae625
commit 482fc13826
10 changed files with 511 additions and 1 deletion

.gitignore vendored
View File

@@ -6,4 +6,5 @@ mediapipe/provisioning_profile.mobileprovision
 .user.bazelrc
 /mediapipe/examples/android/solutions/posetracking/libs/
 /mediapipe/examples/android/solutions/.idea/
-/mediapipe/examples/android/solutions/
+/mediapipe/examples/android/solutions/posetracking-camera/libs/
+/mediapipe/examples/android/solutions/posetracking-camera/build/

View File

@@ -0,0 +1,57 @@
plugins {
id 'com.android.application'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.3"
defaultConfig {
applicationId "com.google.mediapipe.apps.posetracking_camera"
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
// Copper Labs AAR Files
implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
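// These AARs are produced by the Bazel build script added in this commit and
// copied into libs/ (see the script below).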
// App Dependencies
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.3.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
implementation 'androidx.exifinterface:exifinterface:1.3.3'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
// Mediapipe dependencies
implementation 'com.google.protobuf:protobuf-javalite:3.19.1'
implementation 'com.google.flogger:flogger:latest.release'
implementation 'com.google.flogger:flogger-system-backend:latest.release'
implementation 'com.google.code.findbugs:jsr305:latest.release'
implementation 'com.google.guava:guava:27.0.1-android'
// CameraX core library
def camerax_version = "1.0.0-beta10"
implementation "androidx.camera:camera-core:$camerax_version"
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
// We cannot use the official solution-core artifact, as it is missing dependencies required for pose tracking.
// implementation 'com.google.mediapipe:solution-core:latest.release'
}

View File

@@ -0,0 +1,32 @@
#!/bin/bash
set -e
POSE_TRACKING_OUTPUT_DIR=bazel-bin/mediapipe/java/com/google/mediapipe/solutions/posetracking
GRADLE_LIBS_DIR=mediapipe/examples/android/solutions/posetracking-camera/libs
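# Usage (assumed): run from the MediaPipe repository root with a working Bazel
# Android (SDK/NDK) setup; the built AARs are copied into the Gradle project's
# libs/ directory below.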
bazel build -c opt --strip=ALWAYS \
--host_crosstool_top=@bazel_tools//tools/cpp:toolchain \
--fat_apk_cpu=arm64-v8a,armeabi-v7a \
--legacy_whole_archive=0 \
--features=-legacy_whole_archive \
--copt=-fvisibility=hidden \
--copt=-ffunction-sections \
--copt=-fdata-sections \
--copt=-fstack-protector \
--copt=-Oz \
--copt=-fomit-frame-pointer \
--copt=-DABSL_MIN_LOG_LEVEL=2 \
--linkopt=-Wl,--gc-sections,--strip-all \
//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-api.aar \
//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-landmark.aar \
//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-detection.aar \
//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-graph.aar \
//mediapipe/java/com/google/mediapipe/solutioncore:copperlabs-mediapipe
mkdir -p $GRADLE_LIBS_DIR
rm -f $GRADLE_LIBS_DIR/copperlabs-*.aar
\cp $POSE_TRACKING_OUTPUT_DIR/copperlabs-pose-api.aar $GRADLE_LIBS_DIR
\cp $POSE_TRACKING_OUTPUT_DIR/copperlabs-pose-detection.aar $GRADLE_LIBS_DIR
\cp $POSE_TRACKING_OUTPUT_DIR/copperlabs-pose-graph.aar $GRADLE_LIBS_DIR
\cp $POSE_TRACKING_OUTPUT_DIR/copperlabs-pose-landmark.aar $GRADLE_LIBS_DIR
\cp bazel-bin/mediapipe/java/com/google/mediapipe/solutioncore/copperlabs-mediapipe.aar $GRADLE_LIBS_DIR

View File

@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.mediapipe.examples.posetracking_camera">
<uses-sdk
android:minSdkVersion="21"
android:targetSdkVersion="30" />
<!-- For loading images from gallery -->
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<!-- For using the camera -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<!-- For logging solution events -->
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="CopperLabs PoseTracking"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@@ -0,0 +1,50 @@
# Copyright 2021 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
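# Hypothetical build invocation (not part of this commit), assuming this BUILD
# file lives at mediapipe/examples/android/solutions/posetracking-camera/:
#   bazel build -c opt --config=android_arm64 \
#     //mediapipe/examples/android/solutions/posetracking-camera:posetracking-camera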
android_binary(
name = "posetracking-camera",
srcs = glob(["**/*.java"]),
custom_package = "com.google.mediapipe.examples.posetracking_camera",
manifest = "AndroidManifest.xml",
manifest_values = {
"applicationId": "com.google.mediapipe.examples.posetracking_camera",
},
multidex = "native",
resource_files = ["//mediapipe/examples/android/solutions:resource_files"],
deps = [
"//mediapipe/framework/formats:detection_java_proto_lite",
"//mediapipe/framework/formats:landmark_java_proto_lite",
"//mediapipe/framework/formats:location_data_java_proto_lite",
"//mediapipe/java/com/google/mediapipe/solutioncore:camera_input",
"//mediapipe/java/com/google/mediapipe/solutioncore:mediapipe_jni_lib",
"//mediapipe/java/com/google/mediapipe/solutioncore:solution_rendering",
"//mediapipe/java/com/google/mediapipe/solutioncore:video_input",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-api",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-detection",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-graph",
"//mediapipe/java/com/google/mediapipe/solutions/posetracking:copperlabs-pose-landmark",
"//third_party:androidx_appcompat",
"//third_party:androidx_constraint_layout",
"//third_party:opencv",
"@maven//:androidx_activity_activity",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_exifinterface_exifinterface",
"@maven//:androidx_fragment_fragment",
"@maven//:com_google_guava_guava",
],
)

View File

@@ -0,0 +1,158 @@
// Copyright 2021 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.examples.posetracking_camera;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.View;
import android.widget.Button;
import android.widget.FrameLayout;
import androidx.appcompat.app.AppCompatActivity;
import com.google.mediapipe.formats.proto.LandmarkProto;
import com.google.mediapipe.solutioncore.CameraInput;
import com.google.mediapipe.solutioncore.SolutionGlSurfaceView;
import com.google.mediapipe.solutions.posetracking.PoseTracking;
import com.google.mediapipe.solutions.posetracking.PoseTrackingOptions;
import com.google.mediapipe.solutions.posetracking.PoseTrackingResult;
/** Main activity of the MediaPipe Pose Tracking camera example app. */
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private static final int rotation = Surface.ROTATION_0;
private PoseTracking poseTracking;
// Live camera demo UI and camera components.
private CameraInput cameraInput;
private SolutionGlSurfaceView<PoseTrackingResult> glSurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
disableRedundantUI();
setupLiveDemoUiComponents();
}
/** Sets up the UI components for the live demo with camera input. */
private void setupLiveDemoUiComponents() {
Button startCameraButton = findViewById(R.id.button_start_camera);
startCameraButton.setOnClickListener(
v -> {
setupStreamingModePipeline();
startCameraButton.setVisibility(View.GONE);
});
}
/** Disables the image- and video-loading buttons, which are unused in this camera-only example. */
private void disableRedundantUI() {
findViewById(R.id.button_load_picture).setVisibility(View.GONE);
findViewById(R.id.button_load_video).setVisibility(View.GONE);
}
/** Sets up core workflow for streaming mode. */
private void setupStreamingModePipeline() {
// Initializes a new MediaPipe Pose Tracking solution instance in streaming mode.
poseTracking =
new PoseTracking(
this,
PoseTrackingOptions.builder()
.setStaticImageMode(false)
.setLandmarkVisibility(true)
.setModelComplexity(0)
.setSmoothLandmarks(true)
.build());
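// Note: model complexity 0 selects the lightest and fastest pose model, and
// smoothLandmarks reduces landmark jitter across frames.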
poseTracking.setErrorListener(
(message, e) -> Log.e(TAG, "MediaPipe Pose Tracking error: " + message));
cameraInput = new CameraInput(this);
cameraInput.setNewFrameListener(textureFrame -> poseTracking.send(textureFrame));
// Initializes a new GL surface view with a user-defined PoseTrackingResultGlRenderer.
glSurfaceView =
new SolutionGlSurfaceView<>(
this, poseTracking.getGlContext(), poseTracking.getGlMajorVersion());
glSurfaceView.setSolutionResultRenderer(new PoseTrackingResultGlRenderer());
glSurfaceView.setRenderInputImage(true);
poseTracking.setResultListener(
poseTrackingResult -> {
logExampleKeypoint(poseTrackingResult);
glSurfaceView.setRenderData(poseTrackingResult);
glSurfaceView.requestRender();
});
// The runnable to start the camera after the GL surface view is attached.
glSurfaceView.post(this::startCamera);
// Updates the preview layout.
FrameLayout frameLayout = findViewById(R.id.preview_display_layout);
frameLayout.removeAllViewsInLayout();
frameLayout.addView(glSurfaceView);
glSurfaceView.setVisibility(View.VISIBLE);
frameLayout.requestLayout();
}
private void startCamera() {
cameraInput.getConverter(poseTracking.getGlContext()).setRotation(rotation);
cameraInput.start(
this,
poseTracking.getGlContext(),
CameraInput.CameraFacing.FRONT,
glSurfaceView.getWidth(),
glSurfaceView.getHeight());
}
private void logExampleKeypoint(PoseTrackingResult result) {
if (result.multiPoseLandmarks().isEmpty()) {
return;
}
LandmarkProto.Landmark exampleLandmark = result.multiPoseLandmarks().get(0);
Log.i(
TAG,
String.format(
"Pose landmark at index 0: x=%f, y=%f, z=%f",
exampleLandmark.getX(), exampleLandmark.getY(), exampleLandmark.getZ()));
}
}
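For reference, a minimal sketch of a helper that logs every detected landmark rather than only index 0. The name logAllLandmarks is hypothetical; the sketch assumes only the multiPoseLandmarks() accessor already used in logExampleKeypoint above.

// Hypothetical helper (not part of this commit): logs every pose landmark.
private void logAllLandmarks(PoseTrackingResult result) {
java.util.List<LandmarkProto.Landmark> landmarks = result.multiPoseLandmarks();
for (int i = 0; i < landmarks.size(); ++i) {
LandmarkProto.Landmark landmark = landmarks.get(i);
Log.i(
TAG,
String.format(
"Pose landmark %d: x=%f, y=%f, z=%f",
i, landmark.getX(), landmark.getY(), landmark.getZ()));
}
}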

View File

@@ -0,0 +1,154 @@
// Copyright 2021 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.examples.posetracking_camera;
import android.opengl.GLES20;
import com.google.mediapipe.formats.proto.DetectionProto.Detection;
import com.google.mediapipe.solutioncore.ResultGlRenderer;
import com.google.mediapipe.solutions.posetracking.PoseTrackingResult;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/** A custom implementation of {@link ResultGlRenderer} to render {@link PoseTrackingResult}. */
public class PoseTrackingResultGlRenderer implements ResultGlRenderer<PoseTrackingResult> {
private static final String TAG = "PoseTrackingResultGlRenderer";
private static final float[] KEYPOINT_COLOR = new float[] {1f, 0f, 0f, 1f};
private static final float KEYPOINT_SIZE = 16f;
private static final float[] BBOX_COLOR = new float[] {0f, 1f, 0f, 1f};
private static final int BBOX_THICKNESS = 8;
private static final String VERTEX_SHADER =
"uniform mat4 uProjectionMatrix;\n"
+ "uniform float uPointSize;\n"
+ "attribute vec4 vPosition;\n"
+ "void main() {\n"
+ " gl_Position = uProjectionMatrix * vPosition;\n"
+ " gl_PointSize = uPointSize;"
+ "}";
private static final String FRAGMENT_SHADER =
"precision mediump float;\n"
+ "uniform vec4 uColor;\n"
+ "void main() {\n"
+ " gl_FragColor = uColor;\n"
+ "}";
private int program;
private int positionHandle;
private int pointSizeHandle;
private int projectionMatrixHandle;
private int colorHandle;
private int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
@Override
public void setupRendering() {
program = GLES20.glCreateProgram();
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
positionHandle = GLES20.glGetAttribLocation(program, "vPosition");
pointSizeHandle = GLES20.glGetUniformLocation(program, "uPointSize");
projectionMatrixHandle = GLES20.glGetUniformLocation(program, "uProjectionMatrix");
colorHandle = GLES20.glGetUniformLocation(program, "uColor");
}
/**
* Renders {@link PoseTrackingResult}. The keypoint and bounding-box drawing below is
* currently commented out, so only the shader program and projection matrix are bound.
*/
@Override
public void renderResult(PoseTrackingResult result, float[] projectionMatrix) {
if (result == null) {
return;
}
GLES20.glUseProgram(program);
GLES20.glUniformMatrix4fv(projectionMatrixHandle, 1, false, projectionMatrix, 0);
// GLES20.glUniform1f(pointSizeHandle, KEYPOINT_SIZE);
// int numDetectedFaces = result.multiPoseTrackings().size();
// for (int i = 0; i < numDetectedFaces; ++i) {
// drawDetection(result.multiPoseTrackings().get(i));
// }
}
/**
* Deletes the shader program.
*
* <p>This is only necessary if one wants to release the program while keeping the context around.
*/
public void release() {
GLES20.glDeleteProgram(program);
}
/**
* Legacy drawing helper carried over from the face-detection example; its body is largely
* commented out and it is currently unused.
*/
private void drawDetection(Detection detection) {
if (!detection.hasLocationData()) {
return;
}
// Draw keypoints.
// float[] points = new float[FaceKeypoint.NUM_KEY_POINTS * 2];
// for (int i = 0; i < FaceKeypoint.NUM_KEY_POINTS; ++i) {
// points[2 * i] = detection.getLocationData().getRelativeKeypoints(i).getX();
// points[2 * i + 1] = detection.getLocationData().getRelativeKeypoints(i).getY();
// }
// GLES20.glUniform4fv(colorHandle, 1, KEYPOINT_COLOR, 0);
// FloatBuffer vertexBuffer =
// ByteBuffer.allocateDirect(points.length * 4)
// .order(ByteOrder.nativeOrder())
// .asFloatBuffer()
// .put(points);
// vertexBuffer.position(0);
// GLES20.glEnableVertexAttribArray(positionHandle);
// GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
// GLES20.glDrawArrays(GLES20.GL_POINTS, 0, FaceKeypoint.NUM_KEY_POINTS);
if (!detection.getLocationData().hasRelativeBoundingBox()) {
return;
}
// Draw bounding box.
// float left = detection.getLocationData().getRelativeBoundingBox().getXmin();
// float top = detection.getLocationData().getRelativeBoundingBox().getYmin();
// float right = left + detection.getLocationData().getRelativeBoundingBox().getWidth();
// float bottom = top + detection.getLocationData().getRelativeBoundingBox().getHeight();
// drawLine(top, left, top, right);
// drawLine(bottom, left, bottom, right);
// drawLine(top, left, bottom, left);
// drawLine(top, right, bottom, right);
}
private void drawLine(float y1, float x1, float y2, float x2) {
GLES20.glUniform4fv(colorHandle, 1, BBOX_COLOR, 0);
GLES20.glLineWidth(BBOX_THICKNESS);
float[] vertex = {x1, y1, x2, y2};
FloatBuffer vertexBuffer =
ByteBuffer.allocateDirect(vertex.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertex);
vertexBuffer.position(0);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
}
}
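As a companion to the commented-out keypoint code above, a minimal sketch of a point-drawing helper built only from the handles and constants this class already defines. It is not part of this commit, and it assumes it would be called from renderResult() after glUseProgram().

// Hypothetical helper (not part of this commit): draws one normalized (x, y)
// keypoint using the shader handles created in setupRendering().
private void drawKeypoint(float x, float y) {
GLES20.glUniform1f(pointSizeHandle, KEYPOINT_SIZE);
GLES20.glUniform4fv(colorHandle, 1, KEYPOINT_COLOR, 0);
FloatBuffer vertexBuffer =
ByteBuffer.allocateDirect(2 * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(new float[] {x, y});
vertexBuffer.position(0);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}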

View File

@@ -0,0 +1 @@
../../../res

View File

@@ -3,3 +3,4 @@ include ':facedetection'
 include ':facemesh'
 include ':posetracking'
 include ':hands'
+include ':posetracking-camera'