basic pose tracking solutions api complete

This commit is contained in:
Mautisim Munir 2022-10-05 20:20:15 +05:00
parent f7f3329505
commit d971143040
7 changed files with 88 additions and 33 deletions

View File

@@ -23,8 +23,6 @@
"mediapipe/objc/testing/app/BUILD"
],
"buildTargets" : [
"//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
"//mediapipe/examples/ios/posetrackingsolutiongpu:CommonMediaPipeAppLibrary",
"//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp"
],
"optionSet" : {

View File

@@ -13,7 +13,6 @@
"",
"mediapipe",
"mediapipe/examples/ios",
"mediapipe/examples/ios/common",
"mediapipe/examples/ios/facedetectioncpu",
"mediapipe/examples/ios/facedetectiongpu",
"mediapipe/examples/ios/faceeffect",

View File

@@ -29,19 +29,20 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
[super viewDidLoad];
// create pose tracking options
PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
// create pose tracking from options
self.poseTracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options];
// render pose tracking to a UIView (self.liveView)
self.poseTracking.renderer.layer.frame = self.liveView.layer.bounds;
[self.liveView.layer addSublayer:self.poseTracking.renderer.layer];
// create a block to run when PoseTrackingResults are available
self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results){
NSLog(@"\tLandmark[%d]: (%f, %f, %f)", 0, results.landmarks[0].x,results.landmarks[0].y,results.landmarks[0].z);
};
}
@@ -54,24 +55,20 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
// create and set camera options
self.cameraSource = [[MPPCameraInputSource alloc] init];
[self.cameraSource setDelegate:self.poseTracking queue:self.poseTracking.videoQueue];
self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
// self.cameraSource.cameraPosition = AVCaptureDevicePositionFront;
// When using the front camera, mirror the input for a more natural look.
// self.cameraSource.videoMirrored = YES;
// The frame's native format is rotated with respect to the portrait orientation.
self.cameraSource.orientation = AVCaptureVideoOrientationPortrait;
// request camera access permission
[self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
if (granted) {
//start pose tracking
[self.poseTracking startWithCamera:self.cameraSource];
}
}];
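Taken together, the client-side setup introduced by this commit condenses to the sketch below. Every call is taken from the diff above; self.liveView and self.cameraSource are the existing example app's properties, not new API.

// Create the solution from options and attach its renderer to the preview view.
PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
self.poseTracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options];
self.poseTracking.renderer.layer.frame = self.liveView.layer.bounds;
[self.liveView.layer addSublayer:self.poseTracking.renderer.layer];

// Configure the camera, then hand it to the solution once access is granted.
self.cameraSource = [[MPPCameraInputSource alloc] init];
self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
self.cameraSource.orientation = AVCaptureVideoOrientationPortrait;
[self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
  if (granted) {
    [self.poseTracking startWithCamera:self.cameraSource];
  }
}];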

View File

@@ -11,6 +11,7 @@
#import "mediapipe/objc/MPPPlayerInputSource.h"
#import "mediapipe/objc/MPPTimestampConverter.h"
#import "PoseTrackingOptions.h"
#import "PoseTrackingResults.h"
@interface PoseTracking : NSObject<MPPGraphDelegate,MPPInputSourceDelegate>
// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
@@ -41,6 +42,9 @@
// Process camera frames on this queue.
@property(nonatomic) dispatch_queue_t videoQueue;
// Block invoked whenever pose tracking results are available
@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions;
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
@end
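Because the implementation below dispatches nil when a frame contains no pose, a client's listener should guard for that case. A minimal sketch, assuming MediaPipe Pose's landmark topology (index 0 is the nose):

self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results) {
  if (results == nil) {
    // No pose was detected in this frame.
    return;
  }
  PoseLandmark* nose = results.landmarks[0];
  NSLog(@"nose at (%f, %f, %f)", nose.x, nose.y, nose.z);
};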

View File

@@ -46,6 +46,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
self.graphInputStream = "input_video";
if (poseTrackingOptions.showLandmarks){
self.graphOutputStream = "output_video";
}else{
@@ -62,7 +63,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
self.mediapipeGraph.delegate = self;
self.poseTrackingResultsListener = ^(PoseTrackingResults*){};
return self;
@@ -82,6 +83,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
}
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
[cameraSource setDelegate:self queue:self.videoQueue];
[self startGraph];
// Start fetching frames from the camera.
dispatch_async(self.videoQueue, ^{
@@ -124,18 +127,23 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
- (void)mediapipeGraph:(MPPGraph*)graph
didOutputPacket:(const ::mediapipe::Packet&)packet
fromStream:(const std::string&)streamName {
if (streamName == kLandmarksOutputStream) {
if (packet.IsEmpty()) {
NSLog(@"[TS:%lld] No pose landmarks", packet.Timestamp().Value());
self.poseTrackingResultsListener(nil);
return;
}
const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
NSLog(@"[TS:%lld] Number of pose landmarks: %d", packet.Timestamp().Value(),
landmarks.landmark_size());
NSMutableArray<PoseLandmark*>* poseLandmarks = [[NSMutableArray<PoseLandmark*> alloc] init];
for (int i = 0; i < landmarks.landmark_size(); ++i) {
NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
landmarks.landmark(i).y(), landmarks.landmark(i).z());
[poseLandmarks addObject: [[PoseLandmark alloc] initWithX:landmarks.landmark(i).x() y:landmarks.landmark(i).y() z:landmarks.landmark(i).z() presence:landmarks.landmark(i).presence() visibility:landmarks.landmark(i).visibility()] ];
}
PoseTrackingResults* results = [[PoseTrackingResults alloc] initWithLandmarks:poseLandmarks];
self.poseTrackingResultsListener(results);
}
}
@end
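Note that mediapipeGraph:didOutputPacket:fromStream: is delivered off the main thread, so the listener fires on a MediaPipe worker queue. A sketch of hopping back to the main queue before touching UIKit; statusLabel is a hypothetical property used only for illustration:

__weak typeof(self) weakSelf = self;
self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results) {
  dispatch_async(dispatch_get_main_queue(), ^{
    // UIKit may only be used from the main thread.
    weakSelf.statusLabel.text = results ? @"Pose detected" : @"No pose";
  });
};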

View File

@@ -0,0 +1,26 @@
#ifndef MEDIAPIPE_POSETRACKINGRESULTS_H
#define MEDIAPIPE_POSETRACKINGRESULTS_H
#import <Foundation/Foundation.h>
@interface PoseLandmark: NSObject
@property float x;
@property float y;
@property float z;
@property float presence;
@property float visibility;
- (instancetype) initWithX: (float) x y:(float) y z:(float) z presence:(float) presence visibility:(float) visibility;
@end
@interface PoseTrackingResults : NSObject
@property NSArray<PoseLandmark*>* landmarks;
- (instancetype) initWithLandmarks: (NSArray<PoseLandmark*>*) landmarks;
@end
#endif //MEDIAPIPE_POSETRACKINGRESULTS_H
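The per-landmark presence and visibility scores make it easy to drop low-confidence points before downstream use. A minimal sketch; the 0.5 threshold is an arbitrary assumption, not part of the API:

#import "PoseTrackingResults.h"

static NSArray<PoseLandmark*>* VisibleLandmarks(PoseTrackingResults* results) {
  NSMutableArray<PoseLandmark*>* visible = [NSMutableArray array];
  for (PoseLandmark* landmark in results.landmarks) {
    // Keep only landmarks the model considers likely to be in frame.
    if (landmark.visibility > 0.5f) {
      [visible addObject:landmark];
    }
  }
  return visible;
}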

View File

@@ -0,0 +1,23 @@
#include "PoseTrackingResults.h"
@implementation PoseLandmark
- (instancetype) initWithX: (float) x y:(float) y z:(float) z presence:(float) presence visibility:(float) visibility{
  self = [super init];
  if (self) {
    self.x = x;
    self.y = y;
    self.z = z;
    // Store the confidence scores as well, so they reach the listener intact.
    self.presence = presence;
    self.visibility = visibility;
  }
  return self;
}
@end
@implementation PoseTrackingResults
- (instancetype) initWithLandmarks: (NSArray<PoseLandmark*>*) landmarks{
  self = [super init];
  if (self) {
    self.landmarks = landmarks;
  }
  return self;
}
@end
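Since the initializers store all five fields, the model objects can also be constructed directly, which is handy for unit tests. A hypothetical sketch:

PoseLandmark* landmark = [[PoseLandmark alloc] initWithX:0.5f y:0.5f z:0.0f presence:1.0f visibility:1.0f];
PoseTrackingResults* results = [[PoseTrackingResults alloc] initWithLandmarks:@[ landmark ]];
NSAssert(results.landmarks.count == 1, @"expected exactly one landmark");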