PoseTracking class implementation

commit 1b375a7242 (parent 350c841dc4)
@@ -23,6 +23,8 @@
     "mediapipe/objc/testing/app/BUILD"
   ],
   "buildTargets" : [
+    "//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
+    "//mediapipe/examples/ios/posetrackingsolutiongpu:CommonMediaPipeAppLibrary",
     "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp"
   ],
   "optionSet" : {
@@ -92,6 +94,7 @@
     "mediapipe/examples/ios/objectdetectiongpu",
     "mediapipe/examples/ios/posetrackinggpu",
     "mediapipe/examples/ios/posetrackingsolutiongpu",
+    "mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj",
     "mediapipe/examples/ios/selfiesegmentationgpu",
     "mediapipe/framework",
     "mediapipe/framework/deps",
@@ -2,7 +2,7 @@
   "configDefaults" : {
     "optionSet" : {
       "BazelBuildOptionsDebug" : {
-        "p" : "--config=debug"
+        "p" : "--config=debug --strip=never --features=oso_prefix_is_pwd --apple_generate_dsym"
      },
      "CLANG_CXX_LANGUAGE_STANDARD" : {
        "p" : "c++14"
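Note: the expanded debug option string is made of standard Bazel flags for Apple targets: --strip=never keeps symbols in the linked binary, --apple_generate_dsym emits a dSYM bundle, and --features=oso_prefix_is_pwd records object-file (OSO) paths relative to the working directory so lldb can resolve source locations when the build is driven from Tulsi.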
@@ -13,6 +13,7 @@
     "",
     "mediapipe",
     "mediapipe/examples/ios",
+    "mediapipe/examples/ios/common",
     "mediapipe/examples/ios/facedetectioncpu",
     "mediapipe/examples/ios/facedetectiongpu",
     "mediapipe/examples/ios/faceeffect",
@@ -13,8 +13,7 @@
 // limitations under the License.
 
 #import "AppDelegate.h"
-#import "CommonViewController.h"
+#import "PoseTrackingViewController.h"
 
 @interface AppDelegate ()
 
@@ -55,6 +55,10 @@ objc_library(
     name = "PoseTrackingGpuAppLibrary",
     srcs = [
         "PoseTrackingViewController.mm",
+    ] + [
+        "AppDelegate.mm",
+        "main.mm",
+        "AppDelegate.h",
     ],
     hdrs = [
         "PoseTrackingViewController.h",
@@ -64,10 +68,21 @@ objc_library(
         "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
         "//mediapipe/modules/pose_detection:pose_detection.tflite",
         "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
+    ] + [
+        "Base.lproj/LaunchScreen.storyboard",
+        "Base.lproj/Main.storyboard",
+    ],
+    sdk_frameworks = [
+        "AVFoundation",
+        "CoreGraphics",
+        "CoreMedia",
+        "UIKit",
     ],
     deps = [
-        ":CommonMediaPipeAppLibrary",
         "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
+        "//mediapipe/objc:mediapipe_framework_ios",
+        "//mediapipe/objc:mediapipe_input_sources_ios",
+        "//mediapipe/objc:mediapipe_layer_renderer",
     ] + select({
         "//mediapipe:ios_i386": [],
         "//mediapipe:ios_x86_64": [],
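With these changes the app target carries its own storyboards, SDK frameworks, and MediaPipe framework deps instead of inheriting them from the common library. For reference, the usual MediaPipe invocation for an iOS example of this kind is along the lines of `bazel build -c opt --config=ios_arm64 //mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp`; the exact --config value depends on the local .bazelrc and provisioning setup.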
@@ -78,38 +93,34 @@ objc_library(
     }),
 )
 
-objc_library(
-    name = "CommonMediaPipeAppLibrary",
-    srcs = [
-        "AppDelegate.mm",
-        "CommonViewController.mm",
-        "main.m",
-    ],
-    hdrs = [
-        "AppDelegate.h",
-        "CommonViewController.h",
-    ],
-    data = [
-        "Base.lproj/LaunchScreen.storyboard",
-        "Base.lproj/Main.storyboard",
-    ],
-    sdk_frameworks = [
-        "AVFoundation",
-        "CoreGraphics",
-        "CoreMedia",
-        "UIKit",
-    ],
-    visibility = [
-        "//mediapipe:__subpackages__",
-    ],
-    deps = [
-        "//mediapipe/objc:mediapipe_framework_ios",
-        "//mediapipe/objc:mediapipe_input_sources_ios",
-        "//mediapipe/objc:mediapipe_layer_renderer",
-    ],
-)
-
-exports_files(["Info.plist"])
+#objc_library(
+#    name = "CommonMediaPipeAppLibrary",
+#    srcs = [
+#        "AppDelegate.mm",
+#        "main.m",
+#    ],
+#    hdrs = [
+#        "AppDelegate.h",
+#    ],
+#    data = [
+#        "Base.lproj/LaunchScreen.storyboard",
+#        "Base.lproj/Main.storyboard",
+#    ],
+#    sdk_frameworks = [
+#        "AVFoundation",
+#        "CoreGraphics",
+#        "CoreMedia",
+#        "UIKit",
+#    ],
+#    visibility = [
+#        "//mediapipe:__subpackages__",
+#    ],
+#    deps = [
+#        "//mediapipe/objc:mediapipe_framework_ios",
+#        "//mediapipe/objc:mediapipe_input_sources_ios",
+#        "//mediapipe/objc:mediapipe_layer_renderer",
+#    ],
+#)
 
 filegroup(
     name = "AppIcon",
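The commented-out CommonMediaPipeAppLibrary is superseded by the srcs folded into PoseTrackingGpuAppLibrary in the hunk above (AppDelegate and main now compile directly into the app library). Two things stand out: the Tulsi buildTargets list added earlier still references posetrackingsolutiongpu:CommonMediaPipeAppLibrary, which no longer exists after this change and presumably fails to resolve; and exports_files(["Info.plist"]) appears to be dropped here, which may be unintentional if the application rule still consumes Info.plist from this package.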
@@ -1,16 +1,17 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
     <device id="retina4_7" orientation="portrait" appearance="light"/>
     <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
         <capability name="Safe area layout guides" minToolsVersion="9.0"/>
         <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
     </dependencies>
     <scenes>
-        <!--Common View Controller-->
+        <!--Pose Tracking View Controller-->
         <scene sceneID="tne-QT-ifu">
             <objects>
-                <viewController id="BYZ-38-t0r" customClass="CommonViewController" sceneMemberID="viewController">
+                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+                <viewController id="BYZ-38-t0r" customClass="PoseTrackingViewController" sceneMemberID="viewController">
                     <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
                         <rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
                         <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
@@ -18,30 +19,19 @@
                         <view contentMode="scaleToFill" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="EfB-xq-knP">
                             <rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
                             <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                            <subviews>
-                                <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" text="Camera access needed for this demo. Please enable camera access in the Settings app." textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="emf-N5-sEd">
-                                    <rect key="frame" x="57" y="258" width="260" height="151"/>
-                                    <autoresizingMask key="autoresizingMask" flexibleMinX="YES" flexibleMaxX="YES" flexibleMinY="YES" flexibleMaxY="YES"/>
-                                    <fontDescription key="fontDescription" type="system" pointSize="17"/>
-                                    <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
-                                    <nil key="highlightedColor"/>
-                                </label>
-                            </subviews>
                             <color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                             <accessibility key="accessibilityConfiguration" label="PreviewDisplayView">
                                 <bool key="isElement" value="YES"/>
                             </accessibility>
                         </view>
                     </subviews>
-                    <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                     <viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
+                    <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                 </view>
                 <connections>
                     <outlet property="liveView" destination="8bC-Xf-vdC" id="3qM-tM-inb"/>
-                    <outlet property="noCameraLabel" destination="emf-N5-sEd" id="TUU-KL-fTU"/>
                 </connections>
             </viewController>
-            <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
         </objects>
         <point key="canvasLocation" x="48.799999999999997" y="20.239880059970016"/>
     </scene>
@@ -1,67 +0,0 @@
-// Copyright 2019 The MediaPipe Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#import <UIKit/UIKit.h>
-
-#import "mediapipe/objc/MPPCameraInputSource.h"
-#import "mediapipe/objc/MPPGraph.h"
-#import "mediapipe/objc/MPPLayerRenderer.h"
-#import "mediapipe/objc/MPPPlayerInputSource.h"
-#import "mediapipe/objc/MPPTimestampConverter.h"
-
-typedef NS_ENUM(NSInteger, MediaPipeDemoSourceMode) {
-  MediaPipeDemoSourceCamera,
-  MediaPipeDemoSourceVideo
-};
-
-@interface CommonViewController : UIViewController <MPPGraphDelegate, MPPInputSourceDelegate>
-
-// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
-// viewWillAppear: and sent video frames on videoQueue.
-@property(nonatomic) MPPGraph* mediapipeGraph;
-
-// Handles camera access via AVCaptureSession library.
-@property(nonatomic) MPPCameraInputSource* cameraSource;
-
-// Provides data from a video.
-@property(nonatomic) MPPPlayerInputSource* videoSource;
-
-// Helps to convert timestamp.
-@property(nonatomic) MPPTimestampConverter* timestampConverter;
-
-// The data source for the demo.
-@property(nonatomic) MediaPipeDemoSourceMode sourceMode;
-
-// Inform the user when camera is unavailable.
-@property(nonatomic) IBOutlet UILabel* noCameraLabel;
-
-// Display the camera preview frames.
-@property(strong, nonatomic) IBOutlet UIView* liveView;
-
-// Render frames in a layer.
-@property(nonatomic) MPPLayerRenderer* renderer;
-
-// Process camera frames on this queue.
-@property(nonatomic) dispatch_queue_t videoQueue;
-
-// Graph name.
-@property(nonatomic) NSString* graphName;
-
-// Graph input stream.
-@property(nonatomic) const char* graphInputStream;
-
-// Graph output stream.
-@property(nonatomic) const char* graphOutputStream;
-
-@end
@@ -1,201 +0,0 @@
-// Copyright 2019 The MediaPipe Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#import "CommonViewController.h"
-
-static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
-
-@implementation CommonViewController
-
-// This provides a hook to replace the basic ViewController with a subclass when it's created from a
-// storyboard, without having to change the storyboard itself.
-+ (instancetype)allocWithZone:(struct _NSZone*)zone {
-  NSString* subclassName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"MainViewController"];
-  if (subclassName.length > 0) {
-    Class customClass = NSClassFromString(subclassName);
-    Class baseClass = [CommonViewController class];
-    NSAssert([customClass isSubclassOfClass:baseClass], @"%@ must be a subclass of %@", customClass,
-             baseClass);
-    if (self == baseClass) return [customClass allocWithZone:zone];
-  }
-  return [super allocWithZone:zone];
-}
-
-#pragma mark - Cleanup methods
-
-- (void)dealloc {
-  self.mediapipeGraph.delegate = nil;
-  [self.mediapipeGraph cancel];
-  // Ignore errors since we're cleaning up.
-  [self.mediapipeGraph closeAllInputStreamsWithError:nil];
-  [self.mediapipeGraph waitUntilDoneWithError:nil];
-}
-
-#pragma mark - MediaPipe graph methods
-
-+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
-  // Load the graph config resource.
-  NSError* configLoadError = nil;
-  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
-  if (!resource || resource.length == 0) {
-    return nil;
-  }
-  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
-  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
-  if (!data) {
-    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
-    return nil;
-  }
-
-  // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
-  mediapipe::CalculatorGraphConfig config;
-  config.ParseFromArray(data.bytes, data.length);
-
-  // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
-  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
-  return newGraph;
-}
-
-#pragma mark - UIViewController methods
-
-- (void)viewDidLoad {
-  [super viewDidLoad];
-
-  self.renderer = [[MPPLayerRenderer alloc] init];
-  self.renderer.layer.frame = self.liveView.layer.bounds;
-  [self.liveView.layer addSublayer:self.renderer.layer];
-  self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
-
-  self.timestampConverter = [[MPPTimestampConverter alloc] init];
-
-  dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
-      DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
-  self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);
-
-  self.graphName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphName"];
-  self.graphInputStream =
-      [[[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphInputStream"] UTF8String];
-  self.graphOutputStream =
-      [[[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphOutputStream"] UTF8String];
-
-  self.mediapipeGraph = [[self class] loadGraphFromResource:self.graphName];
-  [self.mediapipeGraph addFrameOutputStream:self.graphOutputStream
-                           outputPacketType:MPPPacketTypePixelBuffer];
-
-  self.mediapipeGraph.delegate = self;
-}
-
-// In this application, there is only one ViewController which has no navigation to other view
-// controllers, and there is only one View with live display showing the result of running the
-// MediaPipe graph on the live video feed. If more view controllers are needed later, the graph
-// setup/teardown and camera start/stop logic should be updated appropriately in response to the
-// appearance/disappearance of this ViewController, as viewWillAppear: can be invoked multiple times
-// depending on the application navigation flow in that case.
-- (void)viewWillAppear:(BOOL)animated {
-  [super viewWillAppear:animated];
-
-  switch (self.sourceMode) {
-    case MediaPipeDemoSourceVideo: {
-      NSString* videoName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"VideoName"];
-      AVAsset* video = [AVAsset assetWithURL:[[NSBundle mainBundle] URLForResource:videoName
-                                                                     withExtension:@"mov"]];
-      self.videoSource = [[MPPPlayerInputSource alloc] initWithAVAsset:video];
-      [self.videoSource setDelegate:self queue:self.videoQueue];
-      dispatch_async(self.videoQueue, ^{
-        [self.videoSource start];
-      });
-      break;
-    }
-    case MediaPipeDemoSourceCamera: {
-      self.cameraSource = [[MPPCameraInputSource alloc] init];
-      [self.cameraSource setDelegate:self queue:self.videoQueue];
-      self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
-
-      NSString* cameraPosition =
-          [[NSBundle mainBundle] objectForInfoDictionaryKey:@"CameraPosition"];
-      if (cameraPosition.length > 0 && [cameraPosition isEqualToString:@"back"]) {
-        self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
-      } else {
-        self.cameraSource.cameraPosition = AVCaptureDevicePositionFront;
-        // When using the front camera, mirror the input for a more natural look.
-        _cameraSource.videoMirrored = YES;
-      }
-
-      // The frame's native format is rotated with respect to the portrait orientation.
-      _cameraSource.orientation = AVCaptureVideoOrientationPortrait;
-
-      [self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
-        if (granted) {
-          dispatch_async(dispatch_get_main_queue(), ^{
-            self.noCameraLabel.hidden = YES;
-          });
-          [self startGraphAndCamera];
-        }
-      }];
-
-      break;
-    }
-  }
-}
-
-- (void)startGraphAndCamera {
-  // Start running self.mediapipeGraph.
-  NSError* error;
-  if (![self.mediapipeGraph startWithError:&error]) {
-    NSLog(@"Failed to start graph: %@", error);
-  }
-  else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
-    NSLog(@"Failed to complete graph initial run: %@", error);
-  }
-
-  // Start fetching frames from the camera.
-  dispatch_async(self.videoQueue, ^{
-    [self.cameraSource start];
-  });
-}
-
-#pragma mark - MPPInputSourceDelegate methods
-
-// Must be invoked on self.videoQueue.
-- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
-                timestamp:(CMTime)timestamp
-               fromSource:(MPPInputSource*)source {
-  if (source != self.cameraSource && source != self.videoSource) {
-    NSLog(@"Unknown source: %@", source);
-    return;
-  }
-
-  [self.mediapipeGraph sendPixelBuffer:imageBuffer
-                            intoStream:self.graphInputStream
-                            packetType:MPPPacketTypePixelBuffer
-                             timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
-}
-
-#pragma mark - MPPGraphDelegate methods
-
-// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
-- (void)mediapipeGraph:(MPPGraph*)graph
-  didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
-            fromStream:(const std::string&)streamName {
-  if (streamName == self.graphOutputStream) {
-    // Display the captured image on the screen.
-    CVPixelBufferRetain(pixelBuffer);
-    dispatch_async(dispatch_get_main_queue(), ^{
-      [self.renderer renderPixelBuffer:pixelBuffer];
-      CVPixelBufferRelease(pixelBuffer);
-    });
-  }
-}
-
-@end
@@ -13,9 +13,13 @@
 // limitations under the License.
 
 #import <UIKit/UIKit.h>
+#import "mediapipe/objc/MPPCameraInputSource.h"
+#import "mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h"
 
-#import "CommonViewController.h"
+@interface PoseTrackingViewController : UIViewController
+@property(strong, nonatomic) IBOutlet UIView* liveView;
 
-@interface PoseTrackingViewController : CommonViewController
+// Handles camera access via AVCaptureSession library.
+@property(nonatomic) MPPCameraInputSource* cameraSource;
+@property(nonatomic) PoseTracking* poseTracking;
 
 @end
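PoseTrackingViewController now derives from plain UIViewController and composes a PoseTracking object plus its own MPPCameraInputSource, instead of subclassing the deleted CommonViewController; the implementation hunks below wire these together.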
@@ -16,6 +16,7 @@
 
 #include "mediapipe/framework/formats/landmark.pb.h"
 #include "mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h"
+#include "mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h"
 
 static const char* kLandmarksOutputStream = "pose_landmarks";
 
@@ -23,41 +24,64 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
 
 #pragma mark - UIViewController methods
 
 - (void)viewDidLoad {
   [super viewDidLoad];
   PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
-  [self.mediapipeGraph addFrameOutputStream:kLandmarksOutputStream
-                           outputPacketType:MPPPacketTypeRaw];
-  [self.mediapipeGraph addFrameOutputStream:"throttled_input_video"
-                           outputPacketType:MPPPacketTypePixelBuffer];
-  if (options.showLandmarks){
-    self.graphOutputStream = "output_video";
-  }else{
-    self.graphOutputStream = "throttled_input_video";
-  }
+  self.poseTracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options];
+
+  self.poseTracking.renderer.layer.frame = self.liveView.layer.bounds;
+  [self.liveView.layer addSublayer:self.poseTracking.renderer.layer];
 }
 
-#pragma mark - MPPGraphDelegate methods
-
-// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
-- (void)mediapipeGraph:(MPPGraph*)graph
-       didOutputPacket:(const ::mediapipe::Packet&)packet
-            fromStream:(const std::string&)streamName {
-  if (streamName == kLandmarksOutputStream) {
-    if (packet.IsEmpty()) {
-      NSLog(@"[TS:%lld] No pose landmarks", packet.Timestamp().Value());
-      return;
-    }
-    const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
-    NSLog(@"[TS:%lld] Number of pose landmarks: %d", packet.Timestamp().Value(),
-          landmarks.landmark_size());
-    for (int i = 0; i < landmarks.landmark_size(); ++i) {
-      NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
-            landmarks.landmark(i).y(), landmarks.landmark(i).z());
-    }
-  }
-}
+// In this application, there is only one ViewController which has no navigation to other view
+// controllers, and there is only one View with live display showing the result of running the
+// MediaPipe graph on the live video feed. If more view controllers are needed later, the graph
+// setup/teardown and camera start/stop logic should be updated appropriately in response to the
+// appearance/disappearance of this ViewController, as viewWillAppear: can be invoked multiple times
+// depending on the application navigation flow in that case.
+- (void)viewWillAppear:(BOOL)animated {
+  [super viewWillAppear:animated];
+
+  self.cameraSource = [[MPPCameraInputSource alloc] init];
+  [self.cameraSource setDelegate:self.poseTracking queue:self.poseTracking.videoQueue];
+  self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
+
+  self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
+  // self.cameraSource.cameraPosition = AVCaptureDevicePositionFront;
+  // // When using the front camera, mirror the input for a more natural look.
+  // _cameraSource.videoMirrored = YES;
+
+  // The frame's native format is rotated with respect to the portrait orientation.
+  _cameraSource.orientation = AVCaptureVideoOrientationPortrait;
+
+  [self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
+    if (granted) {
+      [self.poseTracking startWithCamera:self.cameraSource];
+    }
+  }];
+}
+
+//#pragma mark - MPPGraphDelegate methods
+//
 
 @end
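Taken together, the new client flow is small. A minimal sketch assembled from the calls in this hunk, assuming a liveView outlet connected in the storyboard and the camera-usage key present in Info.plist:

    PoseTrackingOptions* options =
        [[PoseTrackingOptions alloc] initWithShowLandmarks:true cameraRotation:0];
    PoseTracking* poseTracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options];

    // Attach the solution's renderer layer to the view that should display frames.
    poseTracking.renderer.layer.frame = self.liveView.layer.bounds;
    [self.liveView.layer addSublayer:poseTracking.renderer.layer];

    // PoseTracking is the camera's MPPInputSourceDelegate, so frames flow
    // camera -> PoseTracking -> MediaPipe graph -> renderer.
    MPPCameraInputSource* camera = [[MPPCameraInputSource alloc] init];
    [camera setDelegate:poseTracking queue:poseTracking.videoQueue];
    camera.sessionPreset = AVCaptureSessionPresetHigh;
    camera.cameraPosition = AVCaptureDevicePositionBack;
    camera.orientation = AVCaptureVideoOrientationPortrait;

    [camera requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
      if (granted) {
        [poseTracking startWithCamera:camera];
      }
    }];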
@@ -4,11 +4,25 @@ objc_library(
         "*.h",
         "*.mm",
     ]),
-    hdrs = ["PoseTrackingOptions.h"],
+    hdrs = [
+        "PoseTracking.h",
+        "PoseTrackingOptions.h",
+    ],
     copts = [
         "-Wno-shorten-64-to-32",
    ],
    sdk_frameworks = ["Accelerate"],
-    # This build rule is public to allow external customers to build their own iOS apps.
    visibility = ["//visibility:public"],
+    deps = [
+        "//mediapipe/objc:mediapipe_framework_ios",
+        "//mediapipe/objc:mediapipe_input_sources_ios",
+        "//mediapipe/objc:mediapipe_layer_renderer",
+    ] + select({
+        "//mediapipe:ios_i386": [],
+        "//mediapipe:ios_x86_64": [],
+        "//conditions:default": [
+            "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
+            "//mediapipe/framework/formats:landmark_cc_proto",
+        ],
+    }),
 )
mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h (new file, 49 lines)
@@ -0,0 +1,49 @@
+//
+// Created by Mautisim Munir on 05/10/2022.
+//
+
+#ifndef MEDIAPIPE_POSETRACKING_H
+#define MEDIAPIPE_POSETRACKING_H
+#import <Foundation/Foundation.h>
+#import "mediapipe/objc/MPPCameraInputSource.h"
+#import "mediapipe/objc/MPPGraph.h"
+#import "mediapipe/objc/MPPLayerRenderer.h"
+#import "mediapipe/objc/MPPPlayerInputSource.h"
+#import "mediapipe/objc/MPPTimestampConverter.h"
+#import "PoseTrackingOptions.h"
+@interface PoseTracking : NSObject<MPPGraphDelegate,MPPInputSourceDelegate>
+
+// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
+// viewWillAppear: and sent video frames on videoQueue.
+@property(nonatomic) MPPGraph* mediapipeGraph;
+
+// Helps to convert timestamp.
+@property(nonatomic) MPPTimestampConverter* timestampConverter;
+
+// Render frames in a layer.
+@property(nonatomic) MPPLayerRenderer* renderer;
+
+// Graph name.
+@property(nonatomic) NSString* graphName;
+
+// Graph input stream.
+@property(nonatomic) const char* graphInputStream;
+
+// Graph output stream.
+@property(nonatomic) const char* graphOutputStream;
+
+// Modify graph options
+@property(nonatomic) PoseTrackingOptions* poseTrackingOptions;
+
+// Process camera frames on this queue.
+@property(nonatomic) dispatch_queue_t videoQueue;
+
+- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions;
+- (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
+@end
+
+#endif //MEDIAPIPE_POSETRACKING_H
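The options object decides which graph stream feeds the renderer (see the implementation below): with showLandmarks enabled the annotated "output_video" stream is drawn, otherwise the raw "throttled_input_video" passthrough. A sketch of the no-overlay configuration:

    // showLandmarks:false renders the plain camera feed; landmark packets are
    // still delivered to PoseTracking's MPPGraphDelegate callback and logged.
    PoseTrackingOptions* options =
        [[PoseTrackingOptions alloc] initWithShowLandmarks:false cameraRotation:0];
    PoseTracking* tracking = [[PoseTracking alloc] initWithPoseTrackingOptions:options];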
mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm (new file, 141 lines)
@@ -0,0 +1,141 @@
+#include "PoseTracking.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+
+static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
+static const char* kLandmarksOutputStream = "pose_landmarks";
+
+@implementation PoseTracking
+
+#pragma mark - MediaPipe graph methods
+
++ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
+  // Load the graph config resource.
+  NSError* configLoadError = nil;
+  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
+  if (!resource || resource.length == 0) {
+    return nil;
+  }
+  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
+  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
+  if (!data) {
+    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
+    return nil;
+  }
+
+  // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
+  mediapipe::CalculatorGraphConfig config;
+  config.ParseFromArray(data.bytes, data.length);
+
+  // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
+  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
+  return newGraph;
+}
+
+- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions{
+  self.renderer = [[MPPLayerRenderer alloc] init];
+  self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
+
+  self.timestampConverter = [[MPPTimestampConverter alloc] init];
+
+  dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
+      DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
+  self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);
+
+  self.poseTrackingOptions = poseTrackingOptions;
+  self.graphName = @"pose_tracking_gpu";
+  self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
+  self.graphInputStream = "input_video";
+
+  if (poseTrackingOptions.showLandmarks){
+    self.graphOutputStream = "output_video";
+  }else{
+    self.graphOutputStream = "throttled_input_video";
+  }
+
+  [self.mediapipeGraph addFrameOutputStream:self.graphOutputStream
+                           outputPacketType:MPPPacketTypePixelBuffer];
+
+  [self.mediapipeGraph addFrameOutputStream:"pose_landmarks"
+                           outputPacketType:MPPPacketTypeRaw];
+
+  self.mediapipeGraph.delegate = self;
+
+  return self;
+}
+
+- (void)startGraph {
+  // Start running self.mediapipeGraph.
+  NSError* error;
+  if (![self.mediapipeGraph startWithError:&error]) {
+    NSLog(@"Failed to start graph: %@", error);
+  }
+  else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
+    NSLog(@"Failed to complete graph initial run: %@", error);
+  }
+}
+
+- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
+  [self startGraph];
+  // Start fetching frames from the camera.
+  dispatch_async(self.videoQueue, ^{
+    [cameraSource start];
+  });
+}
+
+#pragma mark - MPPInputSourceDelegate methods
+
+// Must be invoked on self.videoQueue.
+- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
+                timestamp:(CMTime)timestamp
+               fromSource:(MPPInputSource*)source {
+  [self.mediapipeGraph sendPixelBuffer:imageBuffer
+                            intoStream:self.graphInputStream
+                            packetType:MPPPacketTypePixelBuffer
+                             timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
+}
+
+#pragma mark - MPPGraphDelegate methods
+
+// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+- (void)mediapipeGraph:(MPPGraph*)graph
+  didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
+            fromStream:(const std::string&)streamName {
+  if (streamName == self.graphOutputStream) {
+    // Display the captured image on the screen.
+    CVPixelBufferRetain(pixelBuffer);
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [self.renderer renderPixelBuffer:pixelBuffer];
+      CVPixelBufferRelease(pixelBuffer);
+    });
+  }
+}
+
+// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+- (void)mediapipeGraph:(MPPGraph*)graph
+       didOutputPacket:(const ::mediapipe::Packet&)packet
+            fromStream:(const std::string&)streamName {
+  if (streamName == kLandmarksOutputStream) {
+    if (packet.IsEmpty()) {
+      NSLog(@"[TS:%lld] No pose landmarks", packet.Timestamp().Value());
+      return;
+    }
+    const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
+    NSLog(@"[TS:%lld] Number of pose landmarks: %d", packet.Timestamp().Value(),
+          landmarks.landmark_size());
+    for (int i = 0; i < landmarks.landmark_size(); ++i) {
+      NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
+            landmarks.landmark(i).y(), landmarks.landmark(i).z());
+    }
+  }
+}
+@end
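Threading note on this class as shown in the hunk: frames enter on videoQueue (a serial, user-interactive queue), the graph's pixel-buffer output is bounced to the main queue for rendering, and landmark packets arrive on a MediaPipe worker thread where they are currently only logged; there is no public callback for consuming NormalizedLandmarkList values yet, so an application that needs the coordinates would have to extend this class. Also worth noting: unlike the deleted CommonViewController, graph teardown (cancel, closeAllInputStreamsWithError:, waitUntilDoneWithError:) is not reimplemented here.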
@@ -4,8 +4,8 @@
 
 #ifndef MEDIAPIPE_POSETRACKINGOPTIONS_H
 #define MEDIAPIPE_POSETRACKINGOPTIONS_H
-
-@interface PoseTrackingOptions
+#import <Foundation/Foundation.h>
+@interface PoseTrackingOptions: NSObject
 
 @property(nonatomic) bool showLandmarks;
 @property(nonatomic) int cameraRotation;