Basic API implementation complete

Mautisim Munir 2022-10-15 14:05:39 +05:00
parent 333c18569f
commit 1f80150c5e
6 changed files with 237 additions and 32 deletions

View File

@@ -24,6 +24,8 @@ import androidx.appcompat.app.AppCompatActivity;
import com.google.mediapipe.solutions.posetracking.ComputerVisionPlugin;
import com.google.mediapipe.solutions.posetracking.Lindera;
import java.util.List;
/**
* Main activity of the MediaPipe Pose Tracking app.
@@ -47,6 +49,8 @@ public class MainActivity extends AppCompatActivity {
setupLiveDemoUiComponents();
plugin = new ComputerVisionPluginImpl();
lindera = new Lindera(plugin);
List<String> cameras = lindera.getAvailableCameras();
lindera.setCamera("BACK");
}
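getAvailableCameras() maps the CameraInput.CameraFacing values to their names (FRONT and BACK in this commit), and setCamera() accepts one of those names. A minimal sketch of how a caller might combine the two calls; the containment check is illustrative only and not part of this diff:

// Illustrative only: choose the back camera when it is reported, otherwise keep the default.
List<String> cameras = lindera.getAvailableCameras();   // e.g. ["FRONT", "BACK"]
if (cameras.contains("BACK")) {
    lindera.setCamera("BACK");
}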

View File

@@ -23,6 +23,7 @@ POSE_TRACKING_SRCS = [
"PoseTrackingResult.java",
"Lindera.java",
"PoseTrackingResultGlRenderer.java",
"CameraRotation.java",
]
android_library(

View File

@@ -0,0 +1,115 @@
package com.google.mediapipe.solutions.posetracking;
/** Per-frame container of tracked body joints; one XYZPointWithConfidence field per pose landmark. */
public class BodyJoints {
public XYZPointWithConfidence nose;
public XYZPointWithConfidence leftEyeInner;
public XYZPointWithConfidence leftEye;
public XYZPointWithConfidence leftEyeOuter;
public XYZPointWithConfidence rightEyeInner;
public XYZPointWithConfidence rightEye;
public XYZPointWithConfidence rightEyeOuter;
public XYZPointWithConfidence leftEar;
public XYZPointWithConfidence rightEar;
public XYZPointWithConfidence mouthLeft;
public XYZPointWithConfidence mouthRight;
public XYZPointWithConfidence leftShoulder;
public XYZPointWithConfidence rightShoulder;
public XYZPointWithConfidence leftElbow;
public XYZPointWithConfidence rightElbow;
public XYZPointWithConfidence leftWrist;
public XYZPointWithConfidence rightWrist;
public XYZPointWithConfidence leftPinky;
public XYZPointWithConfidence rightPinky;
public XYZPointWithConfidence leftIndex;
public XYZPointWithConfidence rightIndex;
public XYZPointWithConfidence leftThumb;
public XYZPointWithConfidence rightThumb;
public XYZPointWithConfidence leftHip;
public XYZPointWithConfidence rightHip;
public XYZPointWithConfidence leftKnee;
public XYZPointWithConfidence rightKnee;
public XYZPointWithConfidence rightAnkle;
public XYZPointWithConfidence leftAnkle;
public XYZPointWithConfidence rightHeel;
public XYZPointWithConfidence leftHeel;
public XYZPointWithConfidence rightFoot;
public XYZPointWithConfidence leftFoot;
// public XYZPointWithConfidence pelvis;
//
// public XYZPointWithConfidence spine;
// public XYZPointWithConfidence thorax;
// public XYZPointWithConfidence neckNose;
// public XYZPointWithConfidence headTop;
public BodyJoints() {
nose = new XYZPointWithConfidence();
leftEyeInner = new XYZPointWithConfidence();
leftEye = new XYZPointWithConfidence();
leftEyeOuter = new XYZPointWithConfidence();
rightEyeInner = new XYZPointWithConfidence();
rightEye = new XYZPointWithConfidence();
rightEyeOuter = new XYZPointWithConfidence();
leftEar = new XYZPointWithConfidence();
rightEar = new XYZPointWithConfidence();
mouthLeft = new XYZPointWithConfidence();
mouthRight = new XYZPointWithConfidence();
leftShoulder = new XYZPointWithConfidence();
rightShoulder = new XYZPointWithConfidence();
leftElbow = new XYZPointWithConfidence();
rightElbow = new XYZPointWithConfidence();
leftWrist = new XYZPointWithConfidence();
rightWrist = new XYZPointWithConfidence();
leftPinky = new XYZPointWithConfidence();
rightPinky = new XYZPointWithConfidence();
leftIndex = new XYZPointWithConfidence();
rightIndex = new XYZPointWithConfidence();
leftThumb = new XYZPointWithConfidence();
rightThumb = new XYZPointWithConfidence();
leftHip = new XYZPointWithConfidence();
rightHip = new XYZPointWithConfidence();
leftKnee = new XYZPointWithConfidence();
rightKnee = new XYZPointWithConfidence();
rightAnkle = new XYZPointWithConfidence();
leftAnkle = new XYZPointWithConfidence();
rightHeel = new XYZPointWithConfidence();
leftHeel = new XYZPointWithConfidence();
rightFoot = new XYZPointWithConfidence();
leftFoot = new XYZPointWithConfidence();
}
}
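BodyJoints instances are populated by Lindera and handed to the ComputerVisionPlugin once per processed frame (see plugin.bodyJoints(timeStamp, bodyJoints) below). ComputerVisionPluginImpl itself is not part of this diff; the following is a hedged sketch of a minimal plugin that just logs one joint, assuming the interface declares the void bodyJoints(int, BodyJoints) method used in Lindera.setupEventListener:

// Hypothetical minimal plugin implementation (not part of this commit).
import android.util.Log;
import com.google.mediapipe.solutions.posetracking.BodyJoints;
import com.google.mediapipe.solutions.posetracking.ComputerVisionPlugin;

public class LoggingPlugin implements ComputerVisionPlugin {
    @Override
    public void bodyJoints(int timestamp, BodyJoints joints) {
        // Each field is an XYZPointWithConfidence carrying x, y, z and a confidence value.
        Log.d("LoggingPlugin", "t=" + timestamp
                + " nose=(" + joints.nose.x + ", " + joints.nose.y + ", " + joints.nose.z + ")"
                + " conf=" + joints.nose.confidence);
    }
}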

View File

@@ -0,0 +1,25 @@
package com.google.mediapipe.solutions.posetracking;
import android.view.Surface;
//public class CameraRotation {
//
// public static final int FIXED_0_DEG = Surface.ROTATION_0;
// public static final int FIXED_180_DEG = Surface.ROTATION_180;
// public static final int FIXED_270_DEG = Surface.ROTATION_270;
// public static final int FIXED_90_DEG = Surface.ROTATION_90;
// public static final int AUTOMATIC = -1;
//
//}
/** Camera rotation options wrapping Surface.ROTATION_* values; AUTOMATIC leaves the rotation unset. */
public enum CameraRotation {
FIXED_0_DEG(Surface.ROTATION_0),
FIXED_90_DEG(Surface.ROTATION_90),
FIXED_180_DEG(Surface.ROTATION_180),
FIXED_270_DEG(Surface.ROTATION_270),
AUTOMATIC(-1);
private final int value;
private CameraRotation(int rotation) {
value = rotation;
}
public int getValue() {
return value;
}
}
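CameraRotation is consumed through Lindera.setCameraRotation (shown in the next file); AUTOMATIC is the default and leaves the frame converter's rotation untouched. A small usage sketch, assuming an already constructed Lindera instance:

// Sketch: force a fixed 90-degree rotation before detection starts (illustrative only).
lindera.setCameraRotation(CameraRotation.FIXED_90_DEG);
// ...or keep the default behaviour:
lindera.setCameraRotation(CameraRotation.AUTOMATIC);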

View File

@@ -12,29 +12,80 @@ import com.google.mediapipe.formats.proto.LandmarkProto;
import com.google.mediapipe.solutioncore.CameraInput;
import com.google.mediapipe.solutioncore.SolutionGlSurfaceView;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/** Public pose-tracking API: wires the MediaPipe pose pipeline to a ComputerVisionPlugin. */
public class Lindera {
private ComputerVisionPlugin plugin;
private static final int rotation = Surface.ROTATION_0;
private PoseTracking poseTracking;
// TODO: Verify that this matches the timestamp convention expected by the actual plugin
private int timeStamp = 0;
private CameraRotation cameraRotation = CameraRotation.AUTOMATIC;
// Live camera demo UI and camera components.
private CameraInput cameraInput;
private SolutionGlSurfaceView<PoseTrackingResult> glSurfaceView;
private CameraInput.CameraFacing cameraFacing = CameraInput.CameraFacing.FRONT;
private AppCompatActivity appCompatActivity;
private ViewGroup computerVisionContainerView;
public Lindera(ComputerVisionPlugin plugin){
this.plugin = plugin;
}
public void initialize (ViewGroup computerVisionContainerView , AppCompatActivity appCompatActivity){
setupStreamingModePipeline(computerVisionContainerView,appCompatActivity);
this.computerVisionContainerView = computerVisionContainerView;
this.appCompatActivity = appCompatActivity;
startDetection();
}
public void setCameraRotation(CameraRotation cameraRotation){
this.cameraRotation = cameraRotation;
}
public void setupEventListener() {
poseTracking.setResultListener(
poseTrackingResult -> {
glSurfaceView.setRenderData(poseTrackingResult);
glSurfaceView.requestRender();
ImmutableList<LandmarkProto.Landmark> landmarks = poseTrackingResult.multiPoseLandmarks();
timeStamp+=1;
if (landmarks.isEmpty()) return;
BodyJoints bodyJoints = new BodyJoints();
landmarksToBodyJoints(landmarks,bodyJoints);
plugin.bodyJoints(timeStamp, bodyJoints);
});
}
public List<String> getAvailableCameras(){
return Arrays.stream(CameraInput.CameraFacing.values()).map(Enum::name).collect(Collectors.toList());
}
public void doOnDestroy(){
stopDetection();
appCompatActivity = null;
computerVisionContainerView = null;
}
/** Sets up core workflow for streaming mode. */
private void setupStreamingModePipeline(ViewGroup computerVisionContainerView,AppCompatActivity appCompatActivity) {
/**
* The camera pipeline will need to be restarted if detection has already been started
* @param name One of FRONT or BACK
*/
public void setCamera(String name){
cameraFacing = CameraInput.CameraFacing.valueOf(name);
}
public void startDetection(){
// Ensure that the class has been initialized
assert (appCompatActivity != null);
// Initializes a new MediaPipe Pose Tracking solution instance in streaming mode.
poseTracking =
new PoseTracking(
@@ -62,7 +113,7 @@ public class Lindera {
setupEventListener();
// The runnable to start camera after the gl surface view is attached.
// For video input source, videoInput.start() will be called when the video uri is available.
glSurfaceView.post(()->{this.startCamera(appCompatActivity);});
glSurfaceView.post(this::startCamera);
// Updates the preview layout.
computerVisionContainerView.removeAllViewsInLayout();
@@ -70,33 +121,21 @@ public class Lindera {
glSurfaceView.setVisibility(View.VISIBLE);
computerVisionContainerView.requestLayout();
}
private void startCamera(AppCompatActivity appCompatActivity) {
cameraInput.getConverter(poseTracking.getGlContext()).setRotation(rotation);
cameraInput.start(
appCompatActivity,
poseTracking.getGlContext(),
CameraInput.CameraFacing.FRONT,
glSurfaceView.getWidth(),
glSurfaceView.getHeight());
public void stopDetection(){
if (cameraInput != null) {
cameraInput.setNewFrameListener(null);
cameraInput.close();
}
if (glSurfaceView != null) {
glSurfaceView.setVisibility(View.GONE);
}
if (poseTracking != null) {
poseTracking.close();
}
timeStamp = 0;
}
public void setupEventListener() {
poseTracking.setResultListener(
poseTrackingResult -> {
glSurfaceView.setRenderData(poseTrackingResult);
glSurfaceView.requestRender();
ImmutableList<LandmarkProto.Landmark> landmarks = poseTrackingResult.multiPoseLandmarks();
timeStamp+=1;
if (landmarks.isEmpty()) return;
BodyJoints bodyJoints = new BodyJoints();
landmarksToBodyJoints(landmarks,bodyJoints);
plugin.bodyJoints(timeStamp, bodyJoints);
});
}
private void landmarkToXYZPointWithConfidence(LandmarkProto.Landmark landmark,XYZPointWithConfidence bodyJoint){
bodyJoint.x = landmark.getX();
bodyJoint.y = landmark.getY();
@@ -155,7 +194,17 @@ public class Lindera {
landmarkToXYZPointWithConfidence(landmarks.get(PoseTrackingResult.LEFT_FOOT), bodyJoints.leftFoot);
}
private void startCamera() {
if (cameraRotation!=CameraRotation.AUTOMATIC) {
cameraInput.getConverter(poseTracking.getGlContext()).setRotation(cameraRotation.getValue());
}
cameraInput.start(
appCompatActivity,
poseTracking.getGlContext(),
cameraFacing,
glSurfaceView.getWidth(),
glSurfaceView.getHeight());
}
}
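The setCamera Javadoc above notes that the camera pipeline has to be restarted when a new facing is chosen after detection has begun. A hedged sketch of that flow using the public methods in this file; the exact restart order is an assumption based on that comment:

// Sketch: switch camera facing at runtime (assumed flow, not part of this commit).
void switchCamera(Lindera lindera, String facing) {
    lindera.stopDetection();      // tears down cameraInput, hides the GL surface, closes poseTracking
    lindera.setCamera(facing);    // "FRONT" or "BACK", as returned by getAvailableCameras()
    lindera.startDetection();     // rebuilds the pipeline with the new facing
}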

View File

@@ -0,0 +1,11 @@
package com.google.mediapipe.solutions.posetracking;
/** A single landmark sample: x, y, z coordinates plus a confidence value. */
public class XYZPointWithConfidence {
public float x = 0;
public float y = 0;
public float z = 0;
public float confidence = 0;
}