basic pose tracking solutions api complete

parent f7f3329505
commit d971143040
@@ -23,8 +23,6 @@
       "mediapipe/objc/testing/app/BUILD"
     ],
     "buildTargets" : [
-      "//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
-      "//mediapipe/examples/ios/posetrackingsolutiongpu:CommonMediaPipeAppLibrary",
       "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp"
     ],
     "optionSet" : {
@@ -13,7 +13,6 @@
     "",
     "mediapipe",
     "mediapipe/examples/ios",
-    "mediapipe/examples/ios/common",
     "mediapipe/examples/ios/facedetectioncpu",
     "mediapipe/examples/ios/facedetectiongpu",
     "mediapipe/examples/ios/faceeffect",
@@ -28,20 +28,21 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 - (void)viewDidLoad {
   [super viewDidLoad];

-  PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
+  // create pose tracking options
+  PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
+  // create pose tracking from options
   self.poseTracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options];
+  // render pose tracking to a UIView (self.liveView)
   self.poseTracking.renderer.layer.frame = self.liveView.layer.bounds;
   [self.liveView.layer addSublayer:self.poseTracking.renderer.layer];

+  // create a block to run when PoseTrackingResults are available
+  self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results){
+    NSLog(@"\tLandmark[%d]: (%f, %f, %f)", 0, results.landmarks[0].x, results.landmarks[0].y, results.landmarks[0].z);
+  };
 }
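Note that the listener above reads `results.landmarks[0]` unconditionally, while the graph callback (in the `PoseTracking.mm` hunk further down) passes `nil` to the listener whenever a frame contains no pose. A defensive sketch of the same block; treating index 0 as the nose follows MediaPipe's pose landmark ordering but is an assumption here:

self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results){
    // The graph reports frames without a detected pose as nil results.
    if (results == nil || results.landmarks.count == 0) return;
    PoseLandmark* nose = results.landmarks[0];  // index 0 assumed to be the nose
    NSLog(@"Landmark[0]: (%f, %f, %f) visibility=%f", nose.x, nose.y, nose.z, nose.visibility);
};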
@@ -53,25 +54,21 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 // depending on the application navigation flow in that case.
 - (void)viewWillAppear:(BOOL)animated {
   [super viewWillAppear:animated];

-  self.cameraSource = [[MPPCameraInputSource alloc] init];
-  [self.cameraSource setDelegate:self.poseTracking queue:self.poseTracking.videoQueue];
-  self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
-  self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
-  // self.cameraSource.cameraPosition = AVCaptureDevicePositionFront;
-  // // When using the front camera, mirror the input for a more natural look.
-  // _cameraSource.videoMirrored = YES;
+  // create and set camera options
+  self.cameraSource = [[MPPCameraInputSource alloc] init];
+  self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
+  self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
+  // When using the front camera, mirror the input for a more natural look.
+  //self.cameraSource.videoMirrored = YES;

   // The frame's native format is rotated with respect to the portrait orientation.
-  _cameraSource.orientation = AVCaptureVideoOrientationPortrait;
+  self.cameraSource.orientation = AVCaptureVideoOrientationPortrait;

+  // request camera access permission
   [self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
     if (granted) {
+      //start pose tracking
       [self.poseTracking startWithCamera:self.cameraSource];
     }
   }];
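The commented-out lines preserve the front-camera configuration from the upstream example. A minimal sketch of enabling it, assuming the same `MPPCameraInputSource` properties (not enabled by this commit):

// Hypothetical front-camera setup:
self.cameraSource.cameraPosition = AVCaptureDevicePositionFront;
// When using the front camera, mirror the input for a more natural look.
self.cameraSource.videoMirrored = YES;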
@@ -11,6 +11,7 @@
 #import "mediapipe/objc/MPPPlayerInputSource.h"
 #import "mediapipe/objc/MPPTimestampConverter.h"
 #import "PoseTrackingOptions.h"
+#import "PoseTrackingResults.h"
 @interface PoseTracking : NSObject<MPPGraphDelegate,MPPInputSourceDelegate>

 // The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
@@ -41,6 +42,9 @@
 // Process camera frames on this queue.
 @property(nonatomic) dispatch_queue_t videoQueue;

+// Codeblock that runs whenever pose tracking results are available
+@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
+
 - (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions;
 - (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
 @end
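One caveat on the new property: Objective-C block properties are conventionally declared with the `copy` attribute so a stack-allocated block survives the caller's scope. A sketch of the declaration under that convention (the commit itself uses plain `nonatomic`):

// Copying moves the block off the stack; under ARC the assignment copies
// anyway, but `copy` documents the intent.
@property(nonatomic, copy) void (^poseTrackingResultsListener)(PoseTrackingResults* results);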
@@ -45,7 +45,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
     self.graphName = @"pose_tracking_gpu";
     self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
     self.graphInputStream = "input_video";

+
     if (poseTrackingOptions.showLandmarks){
         self.graphOutputStream = "output_video";
     }else{
@@ -62,7 +63,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks";

     self.mediapipeGraph.delegate = self;

-
+    self.poseTrackingResultsListener = ^(PoseTrackingResults*){};

     return self;
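Installing an empty block as the default is what lets the graph callback invoke the listener unconditionally; in Objective-C, calling a nil block is undefined behavior (typically a crash). An alternative sketch that guards each call site instead of installing a default, mirroring the call made in `didOutputPacket` below:

// Equivalent nil-safe invocation, if no default block were installed:
if (self.poseTrackingResultsListener) {
    self.poseTrackingResultsListener(results);
}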
@@ -82,6 +83,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 }

 - (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
+    [cameraSource setDelegate:self queue:self.videoQueue];
+
     [self startGraph];
     // Start fetching frames from the camera.
     dispatch_async(self.videoQueue, ^{
@@ -124,18 +127,23 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 - (void)mediapipeGraph:(MPPGraph*)graph
        didOutputPacket:(const ::mediapipe::Packet&)packet
             fromStream:(const std::string&)streamName {
   if (streamName == kLandmarksOutputStream) {
     if (packet.IsEmpty()) {
-      NSLog(@"[TS:%lld] No pose landmarks", packet.Timestamp().Value());
+      self.poseTrackingResultsListener(nil);
       return;
     }
     const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
-    NSLog(@"[TS:%lld] Number of pose landmarks: %d", packet.Timestamp().Value(),
-          landmarks.landmark_size());
+    NSMutableArray<PoseLandmark*>* poseLandmarks = [[NSMutableArray<PoseLandmark*> alloc] init];
     for (int i = 0; i < landmarks.landmark_size(); ++i) {
-      NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
-            landmarks.landmark(i).y(), landmarks.landmark(i).z());
+      [poseLandmarks addObject: [[PoseLandmark alloc] initWithX:landmarks.landmark(i).x() y:landmarks.landmark(i).y() z:landmarks.landmark(i).z() presence:landmarks.landmark(i).presence() visibility:landmarks.landmark(i).visibility()] ];
     }
+    PoseTrackingResults* results = [[PoseTrackingResults alloc] initWithLandmarks:poseLandmarks];
+    self.poseTrackingResultsListener(results);
   }
 }
 @end
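`didOutputPacket:fromStream:` fires on MediaPipe's graph thread, not the main thread, so a listener that updates UIKit should hop queues. A sketch, assuming a hypothetical `statusLabel` outlet on the view controller:

__weak typeof(self) weakSelf = self;
self.poseTracking.poseTrackingResultsListener = ^(PoseTrackingResults* results) {
  dispatch_async(dispatch_get_main_queue(), ^{
    // UIKit may only be touched on the main thread.
    weakSelf.statusLabel.text = results ? @"Pose detected" : @"No pose";
  });
};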
@@ -0,0 +1,26 @@
+#ifndef MEDIAPIPE_POSETRACKINGRESULTS_H
+#define MEDIAPIPE_POSETRACKINGRESULTS_H
+
+#import <Foundation/Foundation.h>
+
+@interface PoseLandmark: NSObject
+
+@property float x;
+@property float y;
+@property float z;
+@property float presence;
+@property float visibility;
+
+- (instancetype) initWithX: (float) x y:(float) y z:(float) z presence:(float) presence visibility:(float) visibility;
+
+@end
+
+@interface PoseTrackingResults : NSObject
+
+@property NSArray<PoseLandmark*>* landmarks;
+
+- (instancetype) initWithLandmarks: (NSArray<PoseLandmark*>*) landmarks;
+
+@end
+
+#endif //MEDIAPIPE_POSETRACKINGRESULTS_H
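The per-landmark `presence` and `visibility` scores let callers discard low-confidence points before using them. A minimal filtering sketch (the 0.5 threshold is an arbitrary assumption):

NSMutableArray<PoseLandmark*>* visible = [NSMutableArray array];
for (PoseLandmark* lm in results.landmarks) {
    // Keep only landmarks the model considers likely in frame and unoccluded.
    if (lm.visibility > 0.5f && lm.presence > 0.5f) {
        [visible addObject:lm];
    }
}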
@@ -0,0 +1,31 @@
+#include "PoseTrackingResults.h"
+
+@implementation PoseLandmark
+
+- (instancetype) initWithX: (float) x y:(float) y z:(float) z presence:(float) presence visibility:(float) visibility{
+    self = [super init];
+    if (self) {
+        self.x = x;
+        self.y = y;
+        self.z = z;
+        // presence and visibility must be assigned too,
+        // otherwise those properties silently stay at 0.
+        self.presence = presence;
+        self.visibility = visibility;
+    }
+    return self;
+}
+
+@end
+
+@implementation PoseTrackingResults
+
+- (instancetype) initWithLandmarks: (NSArray<PoseLandmark*>*) landmarks{
+    self = [super init];
+    if (self) {
+        self.landmarks = landmarks;
+    }
+    return self;
+}
+
+@end