From d971143040c71123cfaccc8196455d85024b7e80 Mon Sep 17 00:00:00 2001 From: Mautisim Munir Date: Wed, 5 Oct 2022 20:20:15 +0500 Subject: [PATCH] basic pose tracking solutions api complete --- .../Configs/MediaPipe.tulsigen | 2 - .../MediaPipe.tulsiproj/project.tulsiconf | 1 - .../PoseTrackingViewController.mm | 43 +++++++++---------- .../solutions/posetracking_gpu/PoseTracking.h | 4 ++ .../posetracking_gpu/PoseTracking.mm | 22 +++++++--- .../posetracking_gpu/PoseTrackingResults.h | 26 +++++++++++ .../posetracking_gpu/PoseTrackingResults.mm | 23 ++++++++++ 7 files changed, 88 insertions(+), 33 deletions(-) create mode 100644 mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h create mode 100644 mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm diff --git a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen index cbb494369..68b3d405b 100644 --- a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen +++ b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen @@ -23,8 +23,6 @@ "mediapipe/objc/testing/app/BUILD" ], "buildTargets" : [ - "//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary", - "//mediapipe/examples/ios/posetrackingsolutiongpu:CommonMediaPipeAppLibrary", "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp" ], "optionSet" : { diff --git a/mediapipe/MediaPipe.tulsiproj/project.tulsiconf b/mediapipe/MediaPipe.tulsiproj/project.tulsiconf index 6393f863b..84480e106 100644 --- a/mediapipe/MediaPipe.tulsiproj/project.tulsiconf +++ b/mediapipe/MediaPipe.tulsiproj/project.tulsiconf @@ -13,7 +13,6 @@ "", "mediapipe", "mediapipe/examples/ios", - "mediapipe/examples/ios/common", "mediapipe/examples/ios/facedetectioncpu", "mediapipe/examples/ios/facedetectiongpu", "mediapipe/examples/ios/faceeffect", diff --git a/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm 
b/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm index a368fa68f..de49d7461 100644 --- a/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm +++ b/mediapipe/examples/ios/posetrackingsolutiongpu/PoseTrackingViewController.mm @@ -28,20 +28,21 @@ static const char* kLandmarksOutputStream = "pose_landmarks"; - (void)viewDidLoad { - [super viewDidLoad]; - PoseTrackingOptions* options = [ [PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0]; + [super viewDidLoad]; + + // create pose tracking options + PoseTrackingOptions* options = [ [PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0]; + // create pose tracking from options self.poseTracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options]; - + // render pose tracking to a UIView (self.liveView) self.poseTracking.renderer.layer.frame = self.liveView.layer.bounds; [self.liveView.layer addSublayer:self.poseTracking.renderer.layer]; - - - - - - - + // create a block to run when PoseTrackingResults are available + self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results){ + NSLog(@"\tLandmark[%d]: (%f, %f, %f)", 0, results.landmarks[0].x,results.landmarks[0].y,results.landmarks[0].z); + + }; } @@ -53,25 +54,21 @@ static const char* kLandmarksOutputStream = "pose_landmarks"; // depending on the application navigation flow in that case. - (void)viewWillAppear:(BOOL)animated { [super viewWillAppear:animated]; - - self.cameraSource = [[MPPCameraInputSource alloc] init]; - [self.cameraSource setDelegate:self.poseTracking queue:self.poseTracking.videoQueue]; - self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh; - - self.cameraSource.cameraPosition = AVCaptureDevicePositionBack; - -// self.cameraSource.cameraPosition = AVCaptureDevicePositionFront; -// // When using the front camera, mirror the input for a more natural look. 
-// _cameraSource.videoMirrored = YES; - + // create and set camera options + self.cameraSource = [[MPPCameraInputSource alloc] init]; + self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh; + self.cameraSource.cameraPosition = AVCaptureDevicePositionBack; + // When using the front camera, mirror the input for a more natural look. + //self.cameraSource.videoMirrored = YES; // The frame's native format is rotated with respect to the portrait orientation. - _cameraSource.orientation = AVCaptureVideoOrientationPortrait; + self.cameraSource.orientation = AVCaptureVideoOrientationPortrait; + // request camera access permission [self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) { if (granted) { - + //start pose tracking [self.poseTracking startWithCamera:self.cameraSource]; } }]; diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h index 9cfe546b6..1953e1530 100644 --- a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h +++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h @@ -11,6 +11,7 @@ #import "mediapipe/objc/MPPPlayerInputSource.h" #import "mediapipe/objc/MPPTimestampConverter.h" #import "PoseTrackingOptions.h" +#import "PoseTrackingResults.h" @interface PoseTracking : NSObject // The MediaPipe graph currently in use. Initialized in viewDidLoad, started in @@ -41,6 +42,9 @@ // Process camera frames on this queue. 
@property(nonatomic) dispatch_queue_t videoQueue; +// Codeblock that runs whenever pose tracking results are available +@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*); + - (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions; - (void) startWithCamera: (MPPCameraInputSource*) cameraSource; @end diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm index 8403c1187..d58c4e394 100644 --- a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm +++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm @@ -45,7 +45,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks"; self.graphName = @"pose_tracking_gpu"; self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName]; self.graphInputStream = "input_video"; - + + if (poseTrackingOptions.showLandmarks){ self.graphOutputStream = "output_video"; }else{ @@ -62,7 +63,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks"; self.mediapipeGraph.delegate = self; - + self.poseTrackingResultsListener = ^(PoseTrackingResults*){}; return self; @@ -82,6 +83,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks"; } - (void) startWithCamera: (MPPCameraInputSource*) cameraSource { + [cameraSource setDelegate:self queue:self.videoQueue]; + [self startGraph]; // Start fetching frames from the camera. 
dispatch_async(self.videoQueue, ^{ @@ -124,18 +127,23 @@ static const char* kLandmarksOutputStream = "pose_landmarks"; - (void)mediapipeGraph:(MPPGraph*)graph didOutputPacket:(const ::mediapipe::Packet&)packet fromStream:(const std::string&)streamName { + if (streamName == kLandmarksOutputStream) { + + if (packet.IsEmpty()) { - NSLog(@"[TS:%lld] No pose landmarks", packet.Timestamp().Value()); + self.poseTrackingResultsListener(nil); return; } const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>(); - NSLog(@"[TS:%lld] Number of pose landmarks: %d", packet.Timestamp().Value(), - landmarks.landmark_size()); + NSMutableArray* poseLandmarks = [[NSMutableArray alloc] init]; for (int i = 0; i < landmarks.landmark_size(); ++i) { - NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(), - landmarks.landmark(i).y(), landmarks.landmark(i).z()); + + [poseLandmarks addObject: [[PoseLandmark alloc] initWithX:landmarks.landmark(i).x() y:landmarks.landmark(i).y() z:landmarks.landmark(i).z() presence:landmarks.landmark(i).presence() visibility:landmarks.landmark(i).visibility()] ]; } + PoseTrackingResults* results = [[PoseTrackingResults alloc] initWithLandmarks:poseLandmarks]; + self.poseTrackingResultsListener(results); } + } @end diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h new file mode 100644 index 000000000..c4481121d --- /dev/null +++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h @@ -0,0 +1,26 @@ +#ifndef MEDIAPIPE_POSETRACKINGRESULTS_H +#define MEDIAPIPE_POSETRACKINGRESULTS_H + +#import <Foundation/Foundation.h> +@interface PoseLandmark: NSObject + +@property float x; +@property float y; +@property float z; +@property float presence; +@property float visibility; + +- (instancetype) initWithX: (float) x y:(float) y z:(float) z presence:(float) presence visibility:(float) visibility; + +@end + +@interface PoseTrackingResults : NSObject + + +@property 
NSArray* landmarks; + +- (instancetype) initWithLandmarks: (NSArray*) landmarks; + +@end + +#endif //MEDIAPIPE_POSETRACKINGRESULTS_H diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm new file mode 100644 index 000000000..744b2dde4 --- /dev/null +++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm @@ -0,0 +1,23 @@ +#include "PoseTrackingResults.h" + + +@implementation PoseLandmark + +- (instancetype) initWithX: (float) x y:(float) y z:(float) z presence:(float) presence visibility:(float) visibility{ + if ((self = [super init]) == nil) return nil; + self.x = x; self.y = y; self.z = z; + self.presence = presence; self.visibility = visibility; + return self; +} + +@end + + +@implementation PoseTrackingResults + +- (instancetype) initWithLandmarks: (NSArray*) landmarks{ + if ((self = [super init]) == nil) return nil; + self.landmarks = landmarks; return self; +} + +@end