added standalone posetracking gpu example

Mautisim Munir 2022-10-05 14:57:41 +05:00
parent 8cc8a8d954
commit 350c841dc4
20 changed files with 649 additions and 3 deletions

View File

@@ -0,0 +1,21 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property(strong, nonatomic) UIWindow *window;
@end

View File

@@ -0,0 +1,61 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "AppDelegate.h"
#import "CommonViewController.h"
@interface AppDelegate ()
@end
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application
didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for
// certain types of temporary interruptions (such as an incoming phone call or SMS message) or
// when the user quits the application and it begins the transition to the background state. Use
// this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
// callbacks. Games should use this method to pause the game.
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store
// enough application state information to restore your application to its current state in case
// it is terminated later. If your application supports background execution, this method is
// called instead of applicationWillTerminate: when the user quits.
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the active state; here you can undo
// many of the changes made on entering the background.
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If
// the application was previously in the background, optionally refresh the user interface.
}
- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also
// applicationDidEnterBackground:.
}
@end

Binary files not shown (7 new app icon PNGs: 396 B, 686 B, 855 B, 1.0 KiB, 1.7 KiB, 2.6 KiB, 1.2 KiB).

View File

@@ -0,0 +1,106 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"filename" : "40_c_2x.png",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"filename" : "40_c_3x.png",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"filename" : "60_c_iphone_2x.png",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"filename" : "60_c_iphone_3x.png",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"filename" : "40_c_1x.png",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"filename" : "76_c_Ipad.png",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"filename" : "76_c_Ipad_2x.png",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@@ -0,0 +1,7 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@@ -66,7 +66,7 @@ objc_library(
"//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
],
deps = [
"//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
":CommonMediaPipeAppLibrary",
"//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
] + select({
"//mediapipe:ios_i386": [],
@@ -77,3 +77,42 @@ objc_library(
],
}),
)
objc_library(
name = "CommonMediaPipeAppLibrary",
srcs = [
"AppDelegate.mm",
"CommonViewController.mm",
"main.m",
],
hdrs = [
"AppDelegate.h",
"CommonViewController.h",
],
data = [
"Base.lproj/LaunchScreen.storyboard",
"Base.lproj/Main.storyboard",
],
sdk_frameworks = [
"AVFoundation",
"CoreGraphics",
"CoreMedia",
"UIKit",
],
visibility = [
"//mediapipe:__subpackages__",
],
deps = [
"//mediapipe/objc:mediapipe_framework_ios",
"//mediapipe/objc:mediapipe_input_sources_ios",
"//mediapipe/objc:mediapipe_layer_renderer",
],
)
exports_files(["Info.plist"])
filegroup(
name = "AppIcon",
srcs = glob(["Assets.xcassets/AppIcon.appiconset/*"]),
visibility = ["//mediapipe:__subpackages__"],
)

View File

@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

View File

@@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="16097" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_7" orientation="portrait" appearance="light"/>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="16087"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--Common View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="CommonViewController" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="EfB-xq-knP">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" text="Camera access needed for this demo. Please enable camera access in the Settings app." textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="emf-N5-sEd">
<rect key="frame" x="57" y="258" width="260" height="151"/>
<autoresizingMask key="autoresizingMask" flexibleMinX="YES" flexibleMaxX="YES" flexibleMinY="YES" flexibleMaxY="YES"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<accessibility key="accessibilityConfiguration" label="PreviewDisplayView">
<bool key="isElement" value="YES"/>
</accessibility>
</view>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
<connections>
<outlet property="liveView" destination="8bC-Xf-vdC" id="3qM-tM-inb"/>
<outlet property="noCameraLabel" destination="emf-N5-sEd" id="TUU-KL-fTU"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="48.799999999999997" y="20.239880059970016"/>
</scene>
</scenes>
</document>

View File

@@ -0,0 +1,67 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#import "mediapipe/objc/MPPPlayerInputSource.h"
#import "mediapipe/objc/MPPTimestampConverter.h"
typedef NS_ENUM(NSInteger, MediaPipeDemoSourceMode) {
MediaPipeDemoSourceCamera,
MediaPipeDemoSourceVideo
};
@interface CommonViewController : UIViewController <MPPGraphDelegate, MPPInputSourceDelegate>
// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
// viewWillAppear:, and fed video frames on videoQueue.
@property(nonatomic) MPPGraph* mediapipeGraph;
// Handles camera access via an AVFoundation capture session.
@property(nonatomic) MPPCameraInputSource* cameraSource;
// Provides data from a video.
@property(nonatomic) MPPPlayerInputSource* videoSource;
// Converts CMTime video timestamps to MediaPipe timestamps.
@property(nonatomic) MPPTimestampConverter* timestampConverter;
// The data source for the demo.
@property(nonatomic) MediaPipeDemoSourceMode sourceMode;
// Informs the user when the camera is unavailable.
@property(nonatomic) IBOutlet UILabel* noCameraLabel;
// Displays the camera preview frames.
@property(strong, nonatomic) IBOutlet UIView* liveView;
// Renders frames in a layer.
@property(nonatomic) MPPLayerRenderer* renderer;
// Processes camera frames on this queue.
@property(nonatomic) dispatch_queue_t videoQueue;
// Graph name.
@property(nonatomic) NSString* graphName;
// Graph input stream.
@property(nonatomic) const char* graphInputStream;
// Graph output stream.
@property(nonatomic) const char* graphOutputStream;
@end

View File

@@ -0,0 +1,201 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "CommonViewController.h"
static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
@implementation CommonViewController
// This provides a hook to replace the basic ViewController with a subclass when it's created from a
// storyboard, without having to change the storyboard itself. A usage sketch follows this class.
+ (instancetype)allocWithZone:(struct _NSZone*)zone {
NSString* subclassName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"MainViewController"];
if (subclassName.length > 0) {
Class customClass = NSClassFromString(subclassName);
Class baseClass = [CommonViewController class];
NSAssert([customClass isSubclassOfClass:baseClass], @"%@ must be a subclass of %@", customClass,
baseClass);
if (self == baseClass) return [customClass allocWithZone:zone];
}
return [super allocWithZone:zone];
}
#pragma mark - Cleanup methods
- (void)dealloc {
self.mediapipeGraph.delegate = nil;
[self.mediapipeGraph cancel];
// Ignore errors since we're cleaning up.
[self.mediapipeGraph closeAllInputStreamsWithError:nil];
[self.mediapipeGraph waitUntilDoneWithError:nil];
}
#pragma mark - MediaPipe graph methods
+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
// Load the graph config resource.
NSError* configLoadError = nil;
NSBundle* bundle = [NSBundle bundleForClass:[self class]];
if (!resource || resource.length == 0) {
return nil;
}
NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
if (!data) {
NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
return nil;
}
// Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
mediapipe::CalculatorGraphConfig config;
config.ParseFromArray(data.bytes, data.length);
// Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
return newGraph;
}
#pragma mark - UIViewController methods
- (void)viewDidLoad {
[super viewDidLoad];
self.renderer = [[MPPLayerRenderer alloc] init];
self.renderer.layer.frame = self.liveView.layer.bounds;
[self.liveView.layer addSublayer:self.renderer.layer];
self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
self.timestampConverter = [[MPPTimestampConverter alloc] init];
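// Use a serial queue at user-interactive QoS so frame processing keeps pace
// with live capture and rendering.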
dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);
self.graphName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphName"];
self.graphInputStream =
[[[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphInputStream"] UTF8String];
self.graphOutputStream =
[[[NSBundle mainBundle] objectForInfoDictionaryKey:@"GraphOutputStream"] UTF8String];
self.mediapipeGraph = [[self class] loadGraphFromResource:self.graphName];
[self.mediapipeGraph addFrameOutputStream:self.graphOutputStream
outputPacketType:MPPPacketTypePixelBuffer];
self.mediapipeGraph.delegate = self;
}
// In this application, there is only one ViewController which has no navigation to other view
// controllers, and there is only one View with live display showing the result of running the
// MediaPipe graph on the live video feed. If more view controllers are needed later, the graph
// setup/teardown and camera start/stop logic should be updated appropriately in response to the
// appearance/disappearance of this ViewController, as viewWillAppear: can be invoked multiple times
// depending on the application navigation flow in that case.
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
switch (self.sourceMode) {
case MediaPipeDemoSourceVideo: {
NSString* videoName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"VideoName"];
AVAsset* video = [AVAsset assetWithURL:[[NSBundle mainBundle] URLForResource:videoName
withExtension:@"mov"]];
self.videoSource = [[MPPPlayerInputSource alloc] initWithAVAsset:video];
[self.videoSource setDelegate:self queue:self.videoQueue];
dispatch_async(self.videoQueue, ^{
[self.videoSource start];
});
break;
}
case MediaPipeDemoSourceCamera: {
self.cameraSource = [[MPPCameraInputSource alloc] init];
[self.cameraSource setDelegate:self queue:self.videoQueue];
self.cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
NSString* cameraPosition =
[[NSBundle mainBundle] objectForInfoDictionaryKey:@"CameraPosition"];
if (cameraPosition.length > 0 && [cameraPosition isEqualToString:@"back"]) {
self.cameraSource.cameraPosition = AVCaptureDevicePositionBack;
} else {
self.cameraSource.cameraPosition = AVCaptureDevicePositionFront;
// When using the front camera, mirror the input for a more natural look.
self.cameraSource.videoMirrored = YES;
}
// The frame's native format is rotated with respect to the portrait orientation.
self.cameraSource.orientation = AVCaptureVideoOrientationPortrait;
[self.cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
if (granted) {
dispatch_async(dispatch_get_main_queue(), ^{
self.noCameraLabel.hidden = YES;
});
[self startGraphAndCamera];
}
}];
break;
}
}
}
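// Illustrative counterpart, not part of this commit: a controller that could
// disappear and reappear would need a matching stop, since viewWillAppear:
// starts a capture source each time it runs. Messaging a nil cameraSource
// (video mode) is a harmless no-op.
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
dispatch_async(self.videoQueue, ^{
[self.cameraSource stop];
});
}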
- (void)startGraphAndCamera {
// Start running self.mediapipeGraph.
NSError* error;
if (![self.mediapipeGraph startWithError:&error]) {
NSLog(@"Failed to start graph: %@", error);
} else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
// Waiting until the graph is idle surfaces setup errors before any frames are sent.
NSLog(@"Failed to complete graph initial run: %@", error);
}
// Start fetching frames from the camera.
dispatch_async(self.videoQueue, ^{
[self.cameraSource start];
});
}
#pragma mark - MPPInputSourceDelegate methods
// Must be invoked on self.videoQueue.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
timestamp:(CMTime)timestamp
fromSource:(MPPInputSource*)source {
if (source != self.cameraSource && source != self.videoSource) {
NSLog(@"Unknown source: %@", source);
return;
}
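// Convert the frame's CMTime into a MediaPipe timestamp; graph input
// timestamps must increase monotonically.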
[self.mediapipeGraph sendPixelBuffer:imageBuffer
intoStream:self.graphInputStream
packetType:MPPPacketTypePixelBuffer
timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
}
#pragma mark - MPPGraphDelegate methods
// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
fromStream:(const std::string&)streamName {
if (streamName == self.graphOutputStream) {
// Display the captured image on the screen.
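// Retain the buffer across the async hop to the main queue; it is only
// guaranteed to stay alive for the duration of this callback.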
CVPixelBufferRetain(pixelBuffer);
dispatch_async(dispatch_get_main_queue(), ^{
[self.renderer renderPixelBuffer:pixelBuffer];
CVPixelBufferRelease(pixelBuffer);
});
}
}
@end
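// Illustrative sketch, not part of this commit: any CommonViewController
// subclass named by the app's "MainViewController" Info.plist key is swapped in
// by the allocWithZone: hook above. The class name here is hypothetical; this
// commit's PoseTrackingViewController follows the same pattern.
@interface SketchViewController : CommonViewController
@end
@implementation SketchViewController
- (void)viewDidLoad {
// The base class loads the graph named by the "GraphName" Info.plist key.
[super viewDidLoad];
// Subclass-specific setup (extra output streams, graph options) goes here.
}
@end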

View File

@@ -2,6 +2,44 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSCameraUsageDescription</key>
<string>This app uses the camera to demonstrate live video processing.</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
<key>CameraPosition</key>
<string>back</string>
<key>MainViewController</key>

View File

@@ -14,7 +14,7 @@
#import <UIKit/UIKit.h>
#import "mediapipe/examples/ios/common/CommonViewController.h"
#import "CommonViewController.h"
@interface PoseTrackingViewController : CommonViewController

View File

@@ -15,6 +15,7 @@
#import "PoseTrackingViewController.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h"
static const char* kLandmarksOutputStream = "pose_landmarks";
@@ -23,10 +24,19 @@ static const char* kLandmarksOutputStream;
#pragma mark - UIViewController methods
- (void)viewDidLoad {
[super viewDidLoad];
PoseTrackingOptions* options = [[PoseTrackingOptions alloc] initWithShowLandmarks:YES cameraRotation:0];
[self.mediapipeGraph addFrameOutputStream:kLandmarksOutputStream
outputPacketType:MPPPacketTypeRaw];
[self.mediapipeGraph addFrameOutputStream:"throttled_input_video"
outputPacketType:MPPPacketTypePixelBuffer];
// Render the annotated stream when landmarks are drawn; otherwise show the
// unannotated (throttled) camera passthrough.
if (options.showLandmarks) {
self.graphOutputStream = "output_video";
} else {
self.graphOutputStream = "throttled_input_video";
}
}
#pragma mark - MPPGraphDelegate methods
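// Illustrative sketch, not part of this commit (the file's real handler lies
// outside the hunk shown): raw packets on kLandmarksOutputStream carry a
// mediapipe::NormalizedLandmarkList, declared in the landmark.pb.h import above.
- (void)mediapipeGraph:(MPPGraph*)graph
didOutputPacket:(const ::mediapipe::Packet&)packet
fromStream:(const std::string&)streamName {
if (streamName == kLandmarksOutputStream && !packet.IsEmpty()) {
const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
NSLog(@"Received %d pose landmarks.", landmarks.landmark_size());
}
}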

View File

@@ -0,0 +1,22 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
int main(int argc, char * argv[]) {
@autoreleasepool {
return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
}
}