Face Mesh: ヘッダとmmファイル作成

This commit is contained in:
Shodai Yada 2022-10-22 19:36:57 +09:00
parent 62d2ae601e
commit 42d1da0391
5 changed files with 163 additions and 0 deletions

1
.gitignore vendored
View File

@@ -4,3 +4,4 @@ mediapipe/MediaPipe.tulsiproj/*.tulsiconf-user
mediapipe/provisioning_profile.mobileprovision
.configure.bazelrc
.user.bazelrc
mediapipe/.DS_Store

View File

@@ -0,0 +1,23 @@
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
// Forward declarations — keeps this header free of implementation imports.
@class Landmark;
@class SYFaceMesh;
/// Receives results from a running face-mesh graph.
/// NOTE(review): MPPGraph delivers its delegate callbacks on a MediaPipe
/// worker thread, so these methods are presumably NOT called on the main
/// thread — confirm before updating UI from them.
@protocol SYFaceMeshDelegate <NSObject>
/// Called with the landmarks produced for the most recently processed frame.
- (void)faceMeshTracker: (SYFaceMesh*)faceMeshTracker didOutputLandmarks: (NSArray<Landmark *> *)landmarks;
/// Called with the graph's output pixel buffer for the most recently processed frame.
- (void)faceMeshTracker: (SYFaceMesh*)faceMeshTracker didOutputPixelBuffer: (CVPixelBufferRef)pixelBuffer;
@end
/// Thin Objective-C wrapper around the MediaPipe face_mesh_mobile_gpu graph.
/// Usage: init, set `delegate`, call -startGraph once, then feed frames
/// through -processVideoFrame:.
@interface SYFaceMesh : NSObject
- (instancetype)init;
/// Starts the underlying MediaPipe graph; call before sending frames.
- (void)startGraph;
/// Sends one camera/video frame into the graph for processing.
- (void)processVideoFrame: (CVPixelBufferRef)imageBuffer;
/// Weak to avoid a retain cycle with the object that owns this tracker.
@property (weak, nonatomic) id <SYFaceMeshDelegate> delegate;
@end
/// Immutable value object holding one normalized landmark coordinate.
@interface Landmark: NSObject
@property(nonatomic, readonly) float x;
@property(nonatomic, readonly) float y;
@property(nonatomic, readonly) float z;
@end

View File

@@ -0,0 +1,139 @@
#import "MPPBIris.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#include "mediapipe/framework/formats/landmark.pb.h"
// Name of the bundled graph config resource (face_mesh_mobile_gpu.binarypb).
static NSString* const kGraphName = @"face_mesh_mobile_gpu";
// Stream names must match those declared in the face_mesh_mobile_gpu graph.
static const char* kInputStream = "input_video";
static const char* kOutputStream = "output_video";
static const char* kLandmarksOutputStream = "multi_face_landmarks";
static const char* kVideoQueueLabel = "com.mediapipe.prebuilt.example.videoQueue";

// Private class extension: adopt MPPGraphDelegate and own the graph.
// BUG FIX: this extension was declared on `HandTracker` (left over from the
// hand-tracking example) while the implementation below is SYFaceMesh, so
// `self.mediapipeGraph` would not resolve on SYFaceMesh. Declare it on the
// class actually implemented in this file.
@interface SYFaceMesh () <MPPGraphDelegate>
@property(nonatomic) MPPGraph* mediapipeGraph;
@end

// Private initializer for the immutable Landmark value object; the public
// header exposes x/y/z as readonly.
@interface Landmark ()
- (instancetype)initWithX:(float)x y:(float)y z:(float)z;
@end
@implementation SYFaceMesh {}

#pragma mark - Cleanup methods

// Tear the graph down in order: detach the delegate, cancel, close inputs,
// then wait for in-flight packets to drain.
- (void)dealloc {
  self.mediapipeGraph.delegate = nil;
  [self.mediapipeGraph cancel];
  // Ignore errors since we're cleaning up.
  [self.mediapipeGraph closeAllInputStreamsWithError:nil];
  [self.mediapipeGraph waitUntilDoneWithError:nil];
}

#pragma mark - MediaPipe graph methods

/// Loads the graph config resource `<resource>.binarypb` from this class's
/// bundle and builds an MPPGraph with the pixel-buffer and landmark output
/// streams attached. Returns nil if the resource is missing, unreadable,
/// or not a valid CalculatorGraphConfig.
// https://google.github.io/mediapipe/getting_started/hello_world_ios.html#using-a-mediapipe-graph-in-ios
+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
  if (!resource || resource.length == 0) {
    return nil;
  }
  // Load the graph config resource.
  NSError* configLoadError = nil;
  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
  if (!data) {
    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
    return nil;
  }
  // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
  // BUG FIX: the parse result was previously ignored, so a truncated or
  // corrupt .binarypb would silently build a graph from a partial config.
  mediapipe::CalculatorGraphConfig config;
  if (!config.ParseFromArray(data.bytes, data.length)) {
    NSLog(@"Failed to parse MediaPipe graph config: %@", resource);
    return nil;
  }
  // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
  [newGraph addFrameOutputStream:kOutputStream outputPacketType:MPPPacketTypePixelBuffer];
  [newGraph addFrameOutputStream:kLandmarksOutputStream outputPacketType:MPPPacketTypeRaw];
  return newGraph;
}

- (instancetype)init {
  self = [super init];
  if (self) {
    // If loading fails this stays nil; subsequent messages to a nil graph
    // are no-ops, so the tracker degrades to doing nothing.
    self.mediapipeGraph = [[self class] loadGraphFromResource:kGraphName];
    self.mediapipeGraph.delegate = self;
    // Set maxFramesInFlight to a small value to avoid memory contention for real-time processing.
    self.mediapipeGraph.maxFramesInFlight = 2;
  }
  return self;
}

/// Starts running self.mediapipeGraph. Must be called before frames are fed
/// in via -processVideoFrame:.
- (void)startGraph {
  NSError* error;
  if (![self.mediapipeGraph startWithError:&error]) {
    NSLog(@"Failed to start graph: %@", error);
  }
}

#pragma mark - MPPGraphDelegate methods

// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
              fromStream:(const std::string&)streamName {
  if (streamName == kOutputStream) {
    // Delegate is weak; messaging nil is a harmless no-op.
    [_delegate faceMeshTracker:self didOutputPixelBuffer:pixelBuffer];
  }
}

// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
       didOutputPacket:(const ::mediapipe::Packet&)packet
            fromStream:(const std::string&)streamName {
  if (streamName == kLandmarksOutputStream) {
    if (packet.IsEmpty()) { return; }
    const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
    // Convert the protobuf landmark list into Objective-C value objects so
    // the delegate never has to touch C++ types.
    NSMutableArray<Landmark *> *result = [NSMutableArray array];
    for (int i = 0; i < landmarks.landmark_size(); ++i) {
      Landmark *landmark = [[Landmark alloc] initWithX:landmarks.landmark(i).x()
                                                     y:landmarks.landmark(i).y()
                                                     z:landmarks.landmark(i).z()];
      [result addObject:landmark];
    }
    [_delegate faceMeshTracker:self didOutputLandmarks:result];
  }
}

/// Feeds one frame into the graph's input stream.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer {
  [self.mediapipeGraph sendPixelBuffer:imageBuffer
                            intoStream:kInputStream
                            packetType:MPPPacketTypePixelBuffer];
}

@end
@implementation Landmark

/// Designated initializer for an immutable landmark point; the readonly
/// x/y/z properties are backed by ivars assigned exactly once here.
- (instancetype)initWithX:(float)x y:(float)y z:(float)z {
  if ((self = [super init]) != nil) {
    _x = x;
    _y = y;
    _z = z;
  }
  return self;
}

@end

View File

View File