posetracking objective c api update

commit fc2bcf67ec (parent caf4256e00)
@@ -527,11 +527,11 @@ load("@build_bazel_rules_android//android:rules.bzl", "android_ndk_repository",
 android_sdk_repository(
     name = "androidsdk",
     build_tools_version = "30.0.3",
-    # path = "/Users/tj/Library/Android/sdk", # Path to Android SDK, optional if $ANDROID_HOME is set
+    path = "/Users/tj/Library/Android/sdk", # Path to Android SDK, optional if $ANDROID_HOME is set
 )

 android_ndk_repository(
     name = "androidndk", # Required. Name *must* be "androidndk".
     api_level = 21,
-    # path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529", # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
+    path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529", # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
 )
@@ -23,7 +23,10 @@
     "mediapipe/objc/testing/app/BUILD"
   ],
   "buildTargets" : [
-    "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp"
+    "//mediapipe/examples/ios/posetrackingsolution-swift:app_lib",
+    "//mediapipe/examples/ios/posetrackingsolution-swift:posetracking-solution-swift",
+    "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp",
+    "//mediapipe/swift/solutions/lindera:Lindera"
   ],
   "optionSet" : {
     "BazelBuildOptionsDebug" : {
@@ -91,6 +94,10 @@
     "mediapipe/examples/ios/objectdetectioncpu",
     "mediapipe/examples/ios/objectdetectiongpu",
     "mediapipe/examples/ios/posetrackinggpu",
+    "mediapipe/examples/ios/posetrackingsolution-swift",
+    "mediapipe/examples/ios/posetrackingsolution-swift/Camera",
+    "mediapipe/examples/ios/posetrackingsolution-swift/ViewModels",
+    "mediapipe/examples/ios/posetrackingsolution-swift/Views",
     "mediapipe/examples/ios/posetrackingsolutiongpu",
     "mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj",
     "mediapipe/examples/ios/selfiesegmentationgpu",
@@ -117,6 +124,9 @@
     "mediapipe/objc",
     "mediapipe/objc/solutions",
     "mediapipe/objc/solutions/posetracking_gpu",
+    "mediapipe/swift",
+    "mediapipe/swift/solutions",
+    "mediapipe/swift/solutions/lindera",
     "mediapipe/util",
     "mediapipe/util/android",
     "mediapipe/util/android/file",
@@ -25,10 +25,12 @@
     "mediapipe/examples/ios/objectdetectiongpu",
     "mediapipe/examples/ios/objectdetectiontrackinggpu",
     "mediapipe/examples/ios/posetrackinggpu",
+    "mediapipe/examples/ios/posetrackingsolution-swift",
     "mediapipe/examples/ios/posetrackingsolutiongpu",
     "mediapipe/examples/ios/selfiesegmentationgpu",
     "mediapipe/objc",
-    "mediapipe/objc/solutions/posetracking_gpu"
+    "mediapipe/objc/solutions/posetracking_gpu",
+    "mediapipe/swift/solutions/lindera"
   ],
   "projectName" : "Mediapipe",
   "workspaceRoot" : "../.."
@@ -4,6 +4,7 @@ objc_library(
         "*.h",
         "*.mm",
     ]),
+    module_name = "MPPoseTracking",
    hdrs = [
        "PoseTracking.h",
        "PoseTrackingOptions.h",
@@ -6,17 +6,16 @@
 #define MEDIAPIPE_POSETRACKING_H
 #import <Foundation/Foundation.h>
 #import "mediapipe/objc/MPPCameraInputSource.h"
-#import "mediapipe/objc/MPPGraph.h"
 #import "mediapipe/objc/MPPLayerRenderer.h"
 #import "mediapipe/objc/MPPPlayerInputSource.h"
 #import "mediapipe/objc/MPPTimestampConverter.h"
 #import "PoseTrackingOptions.h"
 #import "PoseTrackingResults.h"
-@interface PoseTracking : NSObject<MPPGraphDelegate,MPPInputSourceDelegate>
+@interface PoseTracking : NSObject<MPPInputSourceDelegate>

 // The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
 // viewWillAppear: and sent video frames on videoQueue.
-@property(nonatomic) MPPGraph* mediapipeGraph;
+//@property(nonatomic) MPPGraph* mediapipeGraph;


 // Helps to convert timestamp.
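
Review note: with MPPGraphDelegate dropped from the declaration and the mediapipeGraph property commented out, the graph becomes an implementation detail of PoseTracking. A Swift-side sketch of the narrowed surface, assuming startGraph is declared in this header as the .mm below suggests:

import MPPoseTracking

func startTracking(_ tracker: PoseTracking) {
    // tracker.mediapipeGraph is no longer public, so the graph lifecycle
    // is driven through PoseTracking's own methods instead:
    tracker.startGraph()
}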
@@ -1,9 +1,96 @@
 #include "PoseTracking.h"
 #include "mediapipe/framework/formats/landmark.pb.h"
+#import "mediapipe/objc/MPPGraph.h"


 static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
 static const char* kLandmarksOutputStream = "pose_landmarks";


+#pragma mark - PoseTrackingGraphDelegate Interface
+
+@interface PoseTrackingGraphDelegate : NSObject<MPPGraphDelegate>
+
+@property(nonatomic) MPPGraph* mediapipeGraph;
+@property(nonatomic) const char* graphOutputStream;
+@property(nonatomic) MPPLayerRenderer* renderer;
+@property(nonatomic) void (^poseTrackingResultsListener)(PoseTrackingResults*);
+
+- (id)initWithMediapipeGraph:(MPPGraph*)graph graphOutputStream:(const char*)graphOutputStream
+                    renderer:(MPPLayerRenderer*)renderer;
+// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+- (void)mediapipeGraph:(MPPGraph*)graph
+  didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
+            fromStream:(const std::string&)streamName;
+- (void)mediapipeGraph:(MPPGraph*)graph
+       didOutputPacket:(const ::mediapipe::Packet&)packet
+            fromStream:(const std::string&)streamName;
+
+@end
+
+#pragma mark - PoseTrackingGraphDelegate Implementation
+
+@implementation PoseTrackingGraphDelegate
+
+- (id)initWithMediapipeGraph:(MPPGraph*)graph graphOutputStream:(const char*)graphOutputStream
+                    renderer:(MPPLayerRenderer*)renderer
+{
+  self.mediapipeGraph = graph;
+  self.graphOutputStream = graphOutputStream;
+  self.renderer = renderer;
+  return self;
+}
+
+// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+- (void)mediapipeGraph:(MPPGraph*)graph
+  didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
+            fromStream:(const std::string&)streamName {
+  if (streamName == self.graphOutputStream) {
+    // Display the captured image on the screen.
+    CVPixelBufferRetain(pixelBuffer);
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [self.renderer renderPixelBuffer:pixelBuffer];
+      CVPixelBufferRelease(pixelBuffer);
+    });
+  }
+}
+
+// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+- (void)mediapipeGraph:(MPPGraph*)graph
+       didOutputPacket:(const ::mediapipe::Packet&)packet
+            fromStream:(const std::string&)streamName {
+  if (streamName == kLandmarksOutputStream) {
+    if (packet.IsEmpty()) {
+      self.poseTrackingResultsListener(nil);
+      return;
+    }
+    const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
+    NSMutableArray<PoseLandmark*>* poseLandmarks = [[NSMutableArray<PoseLandmark*> alloc] init];
+    for (int i = 0; i < landmarks.landmark_size(); ++i) {
+      [poseLandmarks addObject:[[PoseLandmark alloc] initWithX:landmarks.landmark(i).x() y:landmarks.landmark(i).y() z:landmarks.landmark(i).z() presence:landmarks.landmark(i).presence() visibility:landmarks.landmark(i).visibility()]];
+    }
+    PoseTrackingResults* results = [[PoseTrackingResults alloc] initWithLandmarks:poseLandmarks];
+    self.poseTrackingResultsListener(results);
+  }
+}
+
+@end
+
+@interface PoseTracking () {
+  // The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
+  // viewWillAppear: and sent video frames on videoQueue.
+  MPPGraph* mediapipeGraph;
+  PoseTrackingGraphDelegate* poseTrackingGraphDelegate;
+}
+@end

 @implementation PoseTracking

 #pragma mark - MediaPipe graph methods
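
Review note: the new PoseTrackingGraphDelegate owns both MPPGraphDelegate callbacks, and poseTrackingResultsListener fires on a MediaPipe worker thread (nil when the landmarks packet is empty), so a consumer has to hop to the main queue itself before touching UI. A hedged Swift sketch; the landmarks property name is inferred from initWithLandmarks: and is not shown in this diff:

import Foundation
import MPPoseTracking

func observe(_ tracker: PoseTracking) {
    tracker.poseTrackingResultsListener = { results in
        // nil mirrors the packet.IsEmpty() branch above.
        guard let landmarks = results?.landmarks else { return }
        DispatchQueue.main.async {
            // PoseLandmark mirrors the NormalizedLandmarkList proto fields.
            for lm in landmarks { print(lm.x, lm.y, lm.z) }
        }
    }
}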
@@ -43,7 +130,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks";

   self.poseTrackingOptions = poseTrackingOptions;
   self.graphName = @"pose_tracking_gpu";
-  self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
+  self->mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
   self.graphInputStream = "input_video";

@@ -53,19 +140,25 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
     self.graphOutputStream = "throttled_input_video";
   }

-  [self.mediapipeGraph addFrameOutputStream:self.graphOutputStream
+  [self->mediapipeGraph addFrameOutputStream:self.graphOutputStream
                    outputPacketType:MPPPacketTypePixelBuffer];

-  [self.mediapipeGraph addFrameOutputStream:"pose_landmarks"
-                   outputPacketType:MPPPacketTypeRaw];
-
-  self.mediapipeGraph.delegate = self;

   self.poseTrackingResultsListener = ^(PoseTrackingResults*){};

+  [self->mediapipeGraph addFrameOutputStream:"pose_landmarks"
+                   outputPacketType:MPPPacketTypeRaw];
+  self->poseTrackingGraphDelegate = [[PoseTrackingGraphDelegate alloc] initWithMediapipeGraph:self->mediapipeGraph graphOutputStream:self.graphOutputStream renderer:self.renderer];
+  // To prevent ARC from causing an accidental memory leak in the next block
+  __weak PoseTracking* weakSelf = self;
+  self->poseTrackingGraphDelegate.poseTrackingResultsListener = ^(PoseTrackingResults* results){
+    weakSelf.poseTrackingResultsListener(results);
+  };
+  self->mediapipeGraph.delegate = self->poseTrackingGraphDelegate;

   return self;
 }
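
Review note: the __weak PoseTracking* weakSelf above is load-bearing: self retains poseTrackingGraphDelegate, so a strong capture of self in the delegate's listener block would close a retain cycle. The equivalent pattern in Swift, for anyone porting this initializer, is a [weak self] capture list; a self-contained sketch:

// Swift analogue of the __weak weakSelf pattern used in this hunk:
// break the self -> delegate -> block -> self retain cycle with a weak capture.
final class Forwarder {
    var onResults: (String) -> Void = { _ in }
    private var delegateListener: ((String) -> Void)?

    init() {
        delegateListener = { [weak self] results in
            self?.onResults(results)  // the block keeps no strong reference to self
        }
    }
}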
@@ -74,10 +167,10 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 - (void)startGraph {
   // Start running self.mediapipeGraph.
   NSError* error;
-  if (![self.mediapipeGraph startWithError:&error]) {
+  if (![self->mediapipeGraph startWithError:&error]) {
     NSLog(@"Failed to start graph: %@", error);
   }
-  else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
+  else if (![self->mediapipeGraph waitUntilIdleWithError:&error]) {
     NSLog(@"Failed to complete graph initial run: %@", error);
   }
 }
@@ -100,7 +193,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
               timestamp:(CMTime)timestamp
              fromSource:(MPPInputSource*)source {

-  [self.mediapipeGraph sendPixelBuffer:imageBuffer
+  [self->mediapipeGraph sendPixelBuffer:imageBuffer
                     intoStream:self.graphInputStream
                     packetType:MPPPacketTypePixelBuffer
                      timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
@@ -108,42 +201,5 @@ static const char* kLandmarksOutputStream = "pose_landmarks";

 #pragma mark - MPPGraphDelegate methods

-// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
-- (void)mediapipeGraph:(MPPGraph*)graph
-  didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
-            fromStream:(const std::string&)streamName {
-  if (streamName == self.graphOutputStream) {
-    // Display the captured image on the screen.
-    CVPixelBufferRetain(pixelBuffer);
-    dispatch_async(dispatch_get_main_queue(), ^{
-      [self.renderer renderPixelBuffer:pixelBuffer];
-      CVPixelBufferRelease(pixelBuffer);
-    });
-  }
-}
-
-
-// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
-- (void)mediapipeGraph:(MPPGraph*)graph
-       didOutputPacket:(const ::mediapipe::Packet&)packet
-            fromStream:(const std::string&)streamName {
-
-  if (streamName == kLandmarksOutputStream) {
-
-    if (packet.IsEmpty()) {
-      self.poseTrackingResultsListener(nil);
-      return;
-    }
-    const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
-    NSMutableArray<PoseLandmark*>* poseLandmarks = [[NSMutableArray<PoseLandmark*> alloc] init];
-    for (int i = 0; i < landmarks.landmark_size(); ++i) {
-
-      [poseLandmarks addObject:[[PoseLandmark alloc] initWithX:landmarks.landmark(i).x() y:landmarks.landmark(i).y() z:landmarks.landmark(i).z() presence:landmarks.landmark(i).presence() visibility:landmarks.landmark(i).visibility()]];
-    }
-    PoseTrackingResults* results = [[PoseTrackingResults alloc] initWithLandmarks:poseLandmarks];
-    self.poseTrackingResultsListener(results);
-  }
-
-}
 @end
@@ -3,4 +3,7 @@ load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
 swift_library(
     name = "Lindera",
     srcs = glob(["*.swift"]),
+    deps = [
+        "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
+    ]
 )
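
Review note: with the new dep on posetracking_gpu_solution, the Lindera swift_library can import the MPPoseTracking module declared earlier in this commit. The Lindera sources themselves are not part of the diff, so this is only a sketch of what the facade might look like; every name inside the class body is an assumption:

import MPPoseTracking  // provided via //mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution

// Hypothetical facade over the Objective-C pose-tracking solution.
public final class Lindera {
    private let tracker = PoseTracking()  // assumed initializer

    public func start(onPose: @escaping (PoseTrackingResults?) -> Void) {
        tracker.poseTrackingResultsListener = onPose
        tracker.startGraph()
    }
}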