mediapipe tasks ios vision: add PoseLandmarker, and add an iOS example PoseLandmarkerApp
This commit is contained in:
parent
39b31e51a9
commit
cbbbc9e49e
|
@ -17,27 +17,29 @@
|
|||
"mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD",
|
||||
"mediapipe/examples/ios/posetrackinggpu/BUILD",
|
||||
"mediapipe/examples/ios/selfiesegmentationgpu/BUILD",
|
||||
"mediapipe/examples/ios/PoseLandmarkerApp/BUILD",
|
||||
"mediapipe/framework/BUILD",
|
||||
"mediapipe/gpu/BUILD",
|
||||
"mediapipe/objc/BUILD",
|
||||
"mediapipe/objc/testing/app/BUILD"
|
||||
],
|
||||
"buildTargets" : [
|
||||
"//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
|
||||
"//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
|
||||
"//mediapipe/examples/ios/faceeffect:FaceEffectApp",
|
||||
"//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
|
||||
"//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
|
||||
"//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
|
||||
"//mediapipe/examples/ios/helloworld:HelloWorldApp",
|
||||
"//mediapipe/examples/ios/holistictrackinggpu:HolisticTrackingGpuApp",
|
||||
"//mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp",
|
||||
"//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
|
||||
"//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
|
||||
"//mediapipe/examples/ios/objectdetectiontrackinggpu:ObjectDetectionTrackingGpuApp",
|
||||
"//mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp",
|
||||
"//mediapipe/examples/ios/selfiesegmentationgpu:SelfieSegmentationGpuApp",
|
||||
"//mediapipe/objc:mediapipe_framework_ios"
|
||||
"@//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
|
||||
"@//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
|
||||
"@//mediapipe/examples/ios/faceeffect:FaceEffectApp",
|
||||
"@//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
|
||||
"@//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
|
||||
"@//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
|
||||
"@//mediapipe/examples/ios/helloworld:HelloWorldApp",
|
||||
"@//mediapipe/examples/ios/holistictrackinggpu:HolisticTrackingGpuApp",
|
||||
"@//mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp",
|
||||
"@//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
|
||||
"@//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
|
||||
"@//mediapipe/examples/ios/objectdetectiontrackinggpu:ObjectDetectionTrackingGpuApp",
|
||||
"@//mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp",
|
||||
"@//mediapipe/examples/ios/selfiesegmentationgpu:SelfieSegmentationGpuApp",
|
||||
"@//mediapipe/examples/ios/PoseLandmarkerApp:PoseLandmarkerApp",
|
||||
"@//mediapipe/objc:mediapipe_framework_ios"
|
||||
],
|
||||
"optionSet" : {
|
||||
"BazelBuildOptionsDebug" : {
|
||||
|
@ -106,6 +108,7 @@
|
|||
"mediapipe/examples/ios/objectdetectiongpu",
|
||||
"mediapipe/examples/ios/posetrackinggpu",
|
||||
"mediapipe/examples/ios/selfiesegmentationgpu",
|
||||
"mediapipe/examples/ios/PoseLandmarkerApp",
|
||||
"mediapipe/framework",
|
||||
"mediapipe/framework/deps",
|
||||
"mediapipe/framework/formats",
|
||||
|
|
14
mediapipe/examples/ios/PoseLandmarkerApp/AppDelegate.h
Normal file
|
@ -0,0 +1,14 @@
|
|||
//
|
||||
// AppDelegate.h
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
@interface AppDelegate : UIResponder <UIApplicationDelegate>
|
||||
|
||||
|
||||
@end
|
||||
|
40
mediapipe/examples/ios/PoseLandmarkerApp/AppDelegate.m
Normal file
|
@ -0,0 +1,40 @@
|
|||
//
|
||||
// AppDelegate.m
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import "AppDelegate.h"
|
||||
|
||||
@interface AppDelegate ()
|
||||
|
||||
@end
|
||||
|
||||
@implementation AppDelegate
|
||||
|
||||
|
||||
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
|
||||
// Override point for customization after application launch.
|
||||
return YES;
|
||||
}
|
||||
|
||||
|
||||
#pragma mark - UISceneSession lifecycle
|
||||
|
||||
|
||||
- (UISceneConfiguration *)application:(UIApplication *)application configurationForConnectingSceneSession:(UISceneSession *)connectingSceneSession options:(UISceneConnectionOptions *)options {
|
||||
// Called when a new scene session is being created.
|
||||
// Use this method to select a configuration to create the new scene with.
|
||||
return [[UISceneConfiguration alloc] initWithName:@"Default Configuration" sessionRole:connectingSceneSession.role];
|
||||
}
|
||||
|
||||
|
||||
- (void)application:(UIApplication *)application didDiscardSceneSessions:(NSSet<UISceneSession *> *)sceneSessions {
|
||||
// Called when the user discards a scene session.
|
||||
// If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.
|
||||
// Use this method to release any resources that were specific to the discarded scenes, as they will not return.
|
||||
}
|
||||
|
||||
|
||||
@end
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"colors" : [
|
||||
{
|
||||
"idiom" : "universal"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"images" : [
|
||||
{
|
||||
"idiom" : "universal",
|
||||
"platform" : "ios",
|
||||
"size" : "1024x1024"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
132
mediapipe/examples/ios/PoseLandmarkerApp/BUILD
Normal file
|
@ -0,0 +1,132 @@
|
|||
# Copyright 2020 The MediaPipe Authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# python3 mediapipe/examples/ios/link_local_profiles.py
|
||||
#bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/PoseLandmarkerApp:PoseLandmarkerApp
|
||||
|
||||
load(
|
||||
"//mediapipe/framework/tool:ios.bzl",
|
||||
"MPP_TASK_MINIMUM_OS_VERSION",
|
||||
"strip_api_include_path_prefix",
|
||||
)
|
||||
|
||||
load(
|
||||
"@build_bazel_rules_apple//apple:ios.bzl",
|
||||
"ios_framework",
|
||||
)
|
||||
|
||||
load(
|
||||
"//mediapipe/examples/ios:bundle_id.bzl",
|
||||
"BUNDLE_ID_PREFIX",
|
||||
"example_provisioning",
|
||||
)
|
||||
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
MIN_IOS_VERSION = MPP_TASK_MINIMUM_OS_VERSION
|
||||
|
||||
alias(
|
||||
name = "poselandmarkerapp",
|
||||
actual = "PoseLandmarkerApp",
|
||||
)
|
||||
|
||||
OBJC_TASK_COMMON_DEPS = [
|
||||
"//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskInfo",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskOptions",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskResult",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskRunner",
|
||||
"//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers",
|
||||
"//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers",
|
||||
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
|
||||
"//mediapipe/tasks/ios/vision/core:MPPMask",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPLandmark",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPConnection",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPDetection",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPEmbeddingResult",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPEmbedding",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPClassificationResult",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPCategory",
|
||||
]
|
||||
|
||||
CALCULATORS_AND_GRAPHS = [
|
||||
"//mediapipe/calculators/core:flow_limiter_calculator",
|
||||
"//mediapipe/tasks/cc/vision/pose_landmarker:pose_landmarker_graph",
|
||||
]
|
||||
|
||||
strip_api_include_path_prefix(
|
||||
name = "strip_api_include_path",
|
||||
hdr_labels = [
|
||||
"//mediapipe/tasks/ios/common:sources/MPPCommon.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPCategory.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPClassificationResult.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPEmbedding.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPEmbeddingResult.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPConnection.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPDetection.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPLandmark.h",
|
||||
"//mediapipe/tasks/ios/vision/core:sources/MPPMask.h",
|
||||
"//mediapipe/tasks/ios/core:sources/MPPBaseOptions.h",
|
||||
"//mediapipe/tasks/ios/core:sources/MPPTaskOptions.h",
|
||||
"//mediapipe/tasks/ios/core:sources/MPPTaskResult.h",
|
||||
"//mediapipe/tasks/ios/vision/core:sources/MPPRunningMode.h",
|
||||
"//mediapipe/tasks/ios/vision/core:sources/MPPImage.h",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:sources/MPPPoseLandmarker.h",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:sources/MPPPoseLandmarkerOptions.h",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:sources/MPPPoseLandmarkerResult.h",
|
||||
],
|
||||
)
|
||||
|
||||
ios_framework(
|
||||
name = "PoseLandmarkerApp",
|
||||
hdrs = [
|
||||
"PoseLandmarkerSDK.h",
|
||||
":MPPBaseOptions.h",
|
||||
":MPPCategory.h",
|
||||
":MPPClassificationResult.h",
|
||||
":MPPDetection.h",
|
||||
":MPPLandmark.h",
|
||||
":MPPConnection.h",
|
||||
":MPPCommon.h",
|
||||
":MPPTaskOptions.h",
|
||||
":MPPTaskResult.h",
|
||||
":MPPImage.h",
|
||||
":MPPRunningMode.h",
|
||||
":MPPPoseLandmarker.h",
|
||||
":MPPPoseLandmarkerOptions.h",
|
||||
":MPPPoseLandmarkerResult.h",
|
||||
":MPPMask.h",
|
||||
],
|
||||
bundle_id = "com.google.mediapipe.poselandmarkersdk",
|
||||
bundle_name = "PoseLandmarkerSDK",
|
||||
families = [
|
||||
"iphone",
|
||||
"ipad",
|
||||
],
|
||||
resources = [
|
||||
"Modules"
|
||||
],
|
||||
infoplists = [
|
||||
"Info.plist",
|
||||
],
|
||||
minimum_os_version = MIN_IOS_VERSION,
|
||||
visibility = ["//visibility:public"],
|
||||
deps = OBJC_TASK_COMMON_DEPS + CALCULATORS_AND_GRAPHS + [
|
||||
"@ios_opencv//:OpencvFramework",
|
||||
"@org_tensorflow//third_party/icu/data:conversion_data",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarker",
|
||||
],
|
||||
)
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
|
||||
<dependencies>
|
||||
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
|
||||
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
|
||||
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
|
||||
</dependencies>
|
||||
<scenes>
|
||||
<!--View Controller-->
|
||||
<scene sceneID="EHf-IW-A2E">
|
||||
<objects>
|
||||
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
|
||||
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
|
||||
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
|
||||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
|
||||
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
|
||||
</view>
|
||||
</viewController>
|
||||
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="53" y="375"/>
|
||||
</scene>
|
||||
</scenes>
|
||||
</document>
|
|
@ -0,0 +1,24 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
|
||||
<dependencies>
|
||||
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
|
||||
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
|
||||
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
|
||||
</dependencies>
|
||||
<scenes>
|
||||
<!--View Controller-->
|
||||
<scene sceneID="tne-QT-ifu">
|
||||
<objects>
|
||||
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
|
||||
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
|
||||
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
|
||||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
|
||||
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
|
||||
</view>
|
||||
</viewController>
|
||||
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
</scene>
|
||||
</scenes>
|
||||
</document>
|
25
mediapipe/examples/ios/PoseLandmarkerApp/Info.plist
Normal file
|
@ -0,0 +1,25 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>UIApplicationSceneManifest</key>
|
||||
<dict>
|
||||
<key>UIApplicationSupportsMultipleScenes</key>
|
||||
<false/>
|
||||
<key>UISceneConfigurations</key>
|
||||
<dict>
|
||||
<key>UIWindowSceneSessionRoleApplication</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>UISceneConfigurationName</key>
|
||||
<string>Default Configuration</string>
|
||||
<key>UISceneDelegateClassName</key>
|
||||
<string>SceneDelegate</string>
|
||||
<key>UISceneStoryboardFile</key>
|
||||
<string>Main</string>
|
||||
</dict>
|
||||
</array>
|
||||
</dict>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
|
@ -0,0 +1,21 @@
|
|||
framework module PoseLandmarkerSDK {
|
||||
umbrella header "PoseLandmarkerSDK.h"
|
||||
|
||||
export *
|
||||
module * { export * }
|
||||
|
||||
link framework "AVFoundation"
|
||||
link framework "Accelerate"
|
||||
link framework "AssetsLibrary"
|
||||
link framework "CoreFoundation"
|
||||
link framework "CoreGraphics"
|
||||
link framework "CoreImage"
|
||||
link framework "CoreMedia"
|
||||
link framework "CoreVideo"
|
||||
link framework "GLKit"
|
||||
link framework "Metal"
|
||||
link framework "MetalKit"
|
||||
link framework "OpenGLES"
|
||||
link framework "QuartzCore"
|
||||
link framework "UIKit"
|
||||
}
|
31
mediapipe/examples/ios/PoseLandmarkerApp/PoseLandmarkerSDK.h
Normal file
|
@ -0,0 +1,31 @@
|
|||
//
|
||||
// PoseLandmarker.h
|
||||
// PoseLandmarker
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/24.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
//! Project version number for PoseLandmarker.
|
||||
FOUNDATION_EXPORT double PoseLandmarkerSDKVersionNumber;
|
||||
|
||||
//! Project version string for PoseLandmarker.
|
||||
FOUNDATION_EXPORT const unsigned char PoseLandmarkerSDKVersionString[];
|
||||
|
||||
// In this header you should import all the public headers of your framework using statements like #import "PoseLandmarker/PublicHeader.h"
|
||||
#import "MPPBaseOptions.h"
|
||||
#import "MPPCategory.h"
|
||||
#import "MPPClassificationResult.h"
|
||||
#import "MPPDetection.h"
|
||||
#import "MPPLandmark.h"
|
||||
#import "MPPConnection.h"
|
||||
#import "MPPCommon.h"
|
||||
#import "MPPTaskOptions.h"
|
||||
#import "MPPTaskResult.h"
|
||||
#import "MPPImage.h"
|
||||
#import "MPPRunningMode.h"
|
||||
#import "MPPPoseLandmarker.h"
|
||||
#import "MPPPoseLandmarkerOptions.h"
|
||||
#import "MPPPoseLandmarkerResult.h"
|
||||
#import "MPPMask.h"
|
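The umbrella header above re-exports the whole task API, so an app that embeds the PoseLandmarkerSDK framework built by the BUILD target in this commit can import a single header. A minimal sketch, assuming the framework is linked into the app target and that a pose landmarker model bundle named pose_landmarker.task (a hypothetical file name) ships with the app:

#import <PoseLandmarkerSDK/PoseLandmarkerSDK.h>

// Hypothetical model file name; any pose landmarker .task bundle packaged with the app works.
NSString *modelPath = [[NSBundle mainBundle] pathForResource:@"pose_landmarker" ofType:@"task"];

NSError *error = nil;
MPPPoseLandmarker *poseLandmarker =
    [[MPPPoseLandmarker alloc] initWithModelPath:modelPath error:&error];
if (!poseLandmarker) {
  NSLog(@"Failed to create the pose landmarker: %@", error);
}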
15
mediapipe/examples/ios/PoseLandmarkerApp/SceneDelegate.h
Normal file
|
@ -0,0 +1,15 @@
|
|||
//
|
||||
// SceneDelegate.h
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
@interface SceneDelegate : UIResponder <UIWindowSceneDelegate>
|
||||
|
||||
@property (strong, nonatomic) UIWindow * window;
|
||||
|
||||
@end
|
||||
|
57
mediapipe/examples/ios/PoseLandmarkerApp/SceneDelegate.m
Normal file
|
@ -0,0 +1,57 @@
|
|||
//
|
||||
// SceneDelegate.m
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import "SceneDelegate.h"
|
||||
|
||||
@interface SceneDelegate ()
|
||||
|
||||
@end
|
||||
|
||||
@implementation SceneDelegate
|
||||
|
||||
|
||||
- (void)scene:(UIScene *)scene willConnectToSession:(UISceneSession *)session options:(UISceneConnectionOptions *)connectionOptions {
|
||||
// Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.
|
||||
// If using a storyboard, the `window` property will automatically be initialized and attached to the scene.
|
||||
// This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
|
||||
}
|
||||
|
||||
|
||||
- (void)sceneDidDisconnect:(UIScene *)scene {
|
||||
// Called as the scene is being released by the system.
|
||||
// This occurs shortly after the scene enters the background, or when its session is discarded.
|
||||
// Release any resources associated with this scene that can be re-created the next time the scene connects.
|
||||
// The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).
|
||||
}
|
||||
|
||||
|
||||
- (void)sceneDidBecomeActive:(UIScene *)scene {
|
||||
// Called when the scene has moved from an inactive state to an active state.
|
||||
// Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.
|
||||
}
|
||||
|
||||
|
||||
- (void)sceneWillResignActive:(UIScene *)scene {
|
||||
// Called when the scene will move from an active state to an inactive state.
|
||||
// This may occur due to temporary interruptions (ex. an incoming phone call).
|
||||
}
|
||||
|
||||
|
||||
- (void)sceneWillEnterForeground:(UIScene *)scene {
|
||||
// Called as the scene transitions from the background to the foreground.
|
||||
// Use this method to undo the changes made on entering the background.
|
||||
}
|
||||
|
||||
|
||||
- (void)sceneDidEnterBackground:(UIScene *)scene {
|
||||
// Called as the scene transitions from the foreground to the background.
|
||||
// Use this method to save data, release shared resources, and store enough scene-specific state information
|
||||
// to restore the scene back to its current state.
|
||||
}
|
||||
|
||||
|
||||
@end
|
14
mediapipe/examples/ios/PoseLandmarkerApp/ViewController.h
Normal file
|
@ -0,0 +1,14 @@
|
|||
//
|
||||
// ViewController.h
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
@interface ViewController : UIViewController
|
||||
|
||||
|
||||
@end
|
||||
|
22
mediapipe/examples/ios/PoseLandmarkerApp/ViewController.m
Normal file
|
@ -0,0 +1,22 @@
|
|||
//
|
||||
// ViewController.m
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import "ViewController.h"
|
||||
|
||||
@interface ViewController ()
|
||||
|
||||
@end
|
||||
|
||||
@implementation ViewController
|
||||
|
||||
- (void)viewDidLoad {
|
||||
[super viewDidLoad];
|
||||
// Do any additional setup after loading the view.
|
||||
}
|
||||
|
||||
|
||||
@end
|
18
mediapipe/examples/ios/PoseLandmarkerApp/main.m
Normal file
|
@ -0,0 +1,18 @@
|
|||
//
|
||||
// main.m
|
||||
// PoseLandmarkerApp
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/28.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
#import "AppDelegate.h"
|
||||
|
||||
int main(int argc, char * argv[]) {
|
||||
NSString * appDelegateClassName;
|
||||
@autoreleasepool {
|
||||
// Setup code that might create autoreleased objects goes here.
|
||||
appDelegateClassName = NSStringFromClass([AppDelegate class]);
|
||||
}
|
||||
return UIApplicationMain(argc, argv, nil, appDelegateClassName);
|
||||
}
|
|
@ -15,7 +15,7 @@
|
|||
"""Configuration helper for iOS app bundle ids and provisioning profiles.
|
||||
"""
|
||||
|
||||
BUNDLE_ID_PREFIX = "*SEE_IOS_INSTRUCTIONS*.mediapipe.examples"
|
||||
BUNDLE_ID_PREFIX = "-f4e692b-b64b-4ac8-9b2e-60856c2c63ff.mediapipe.examples"
|
||||
|
||||
# Look for a provisioning profile in the example's directory first,
|
||||
# otherwise look for a common one.
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
/Users/apple/Library/MobileDevice/Provisioning Profiles/4ebd27a5-7726-4383-947e-d6d947523558.mobileprovision
|
|
@ -12,9 +12,10 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = [
|
||||
"//mediapipe/tasks:internal",
|
||||
])
|
||||
# package(default_visibility = [
|
||||
# "//mediapipe/tasks:internal",
|
||||
# ])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -46,6 +46,14 @@ OBJC_TASK_COMMON_DEPS = [
|
|||
"//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers",
|
||||
"//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers",
|
||||
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
|
||||
"//mediapipe/tasks/ios/vision/core:MPPMask",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPLandmark",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPConnection",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPDetection",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPEmbeddingResult",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPEmbedding",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPClassificationResult",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPCategory",
|
||||
]
|
||||
|
||||
CALCULATORS_AND_GRAPHS = [
|
||||
|
@ -56,6 +64,8 @@ CALCULATORS_AND_GRAPHS = [
|
|||
"//mediapipe/tasks/cc/vision/face_landmarker:face_landmarker_graph",
|
||||
"//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph",
|
||||
"//mediapipe/tasks/cc/vision/object_detector:object_detector_graph",
|
||||
"//mediapipe/tasks/cc/vision/hand_landmarker:hand_landmarker_graph",
|
||||
"//mediapipe/tasks/cc/vision/pose_landmarker:pose_landmarker_graph",
|
||||
]
|
||||
|
||||
strip_api_include_path_prefix(
|
||||
|
@ -69,6 +79,7 @@ strip_api_include_path_prefix(
|
|||
"//mediapipe/tasks/ios/components/containers:sources/MPPConnection.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPDetection.h",
|
||||
"//mediapipe/tasks/ios/components/containers:sources/MPPLandmark.h",
|
||||
"//mediapipe/tasks/ios/vision/core:sources/MPPMask.h",
|
||||
"//mediapipe/tasks/ios/core:sources/MPPBaseOptions.h",
|
||||
"//mediapipe/tasks/ios/core:sources/MPPTaskOptions.h",
|
||||
"//mediapipe/tasks/ios/core:sources/MPPTaskResult.h",
|
||||
|
@ -92,6 +103,12 @@ strip_api_include_path_prefix(
|
|||
"//mediapipe/tasks/ios/vision/object_detector:sources/MPPObjectDetector.h",
|
||||
"//mediapipe/tasks/ios/vision/object_detector:sources/MPPObjectDetectorOptions.h",
|
||||
"//mediapipe/tasks/ios/vision/object_detector:sources/MPPObjectDetectorResult.h",
|
||||
"//mediapipe/tasks/ios/vision/hand_landmarker:sources/MPPHandLandmarkerResult.h",
|
||||
"//mediapipe/tasks/ios/vision/hand_landmarker:sources/MPPHandLandmarkerOptions.h",
|
||||
"//mediapipe/tasks/ios/vision/hand_landmarker:sources/MPPHandLandmarker.h",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:sources/MPPPoseLandmarker.h",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:sources/MPPPoseLandmarkerOptions.h",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:sources/MPPPoseLandmarkerResult.h",
|
||||
],
|
||||
)
|
||||
|
||||
|
@ -181,12 +198,21 @@ apple_static_xcframework(
|
|||
":MPPObjectDetector.h",
|
||||
":MPPObjectDetectorOptions.h",
|
||||
":MPPObjectDetectorResult.h",
|
||||
":MPPHandLandmarker.h",
|
||||
":MPPHandLandmarkerOptions.h",
|
||||
":MPPHandLandmarkerResult.h",
|
||||
":MPPPoseLandmarker.h",
|
||||
":MPPPoseLandmarkerOptions.h",
|
||||
":MPPPoseLandmarkerResult.h",
|
||||
":MPPMask.h",
|
||||
],
|
||||
deps = [
|
||||
"//mediapipe/tasks/ios/vision/face_detector:MPPFaceDetector",
|
||||
"//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarker",
|
||||
"//mediapipe/tasks/ios/vision/image_classifier:MPPImageClassifier",
|
||||
"//mediapipe/tasks/ios/vision/object_detector:MPPObjectDetector",
|
||||
"//mediapipe/tasks/ios/vision/hand_landmarker:MPPHandLandmarker",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarker",
|
||||
],
|
||||
)
|
||||
|
||||
|
|
|
@ -30,11 +30,12 @@ if [[ "$(uname)" != "Darwin" ]]; then
|
|||
fi
|
||||
|
||||
BAZEL="${BAZEL:-$(which bazel)}"
|
||||
FRAMEWORK_NAME="MediaPipeTasksCommon"
|
||||
MPP_BUILD_VERSION=${MPP_BUILD_VERSION:-0.0.1-dev}
|
||||
MPP_ROOT_DIR=$(git rev-parse --show-toplevel)
|
||||
ARCHIVE_FRAMEWORK=${ARCHIVE_FRAMEWORK:-true}
|
||||
IS_RELEASE_BUILD=${IS_RELEASE_BUILD:-false}
|
||||
DEST_DIR=${DEST_DIR:-$HOME}
|
||||
IS_RELEASE_BUILD=${IS_RELEASE_BUILD:-true}
|
||||
DEST_DIR=${DEST_DIR:-$HOME/Desktop/mediapipe}
|
||||
|
||||
echo "Destination"
|
||||
echo "${DEST_DIR}"
|
||||
|
@ -165,8 +166,7 @@ function create_framework_archive {
|
|||
|
||||
#----- (3) Move the framework to the destination -----
|
||||
if [[ "${ARCHIVE_FRAMEWORK}" == true ]]; then
|
||||
local TARGET_DIR="$(realpath "${FRAMEWORK_NAME}")"
|
||||
|
||||
local TARGET_DIR="$(realpath)/${FRAMEWORK_NAME}"
|
||||
# Create the framework archive directory.
|
||||
|
||||
local FRAMEWORK_ARCHIVE_DIR
|
||||
|
|
|
@ -12,7 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
# package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -12,7 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
# package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -12,10 +12,11 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = [
|
||||
"//mediapipe/tasks:internal",
|
||||
"//mediapipe/tasks:users",
|
||||
])
|
||||
# package(default_visibility = [
|
||||
# "//mediapipe/tasks:internal",
|
||||
# "//mediapipe/tasks:users",
|
||||
# ])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -12,7 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
# package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -12,7 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
# package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -14,6 +14,13 @@
|
|||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
typedef enum : NSUInteger {
|
||||
MPPBaseOptionsDelegateType_Unknown,
|
||||
MPPBaseOptionsDelegateType_Cpu,
|
||||
MPPBaseOptionsDelegateType_Gpu,
|
||||
MPPBaseOptionsDelegateType_Tpu
|
||||
} MPPBaseOptionsDelegateType;
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
|
@ -26,6 +33,8 @@ NS_SWIFT_NAME(BaseOptions)
|
|||
/** The path to the model asset to open and mmap in memory. */
|
||||
@property(nonatomic, copy) NSString *modelAssetPath;
|
||||
|
||||
@property(nonatomic, assign) MPPBaseOptionsDelegateType delegateType;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
|
|
|
@ -12,7 +12,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
#package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
|
@ -33,6 +33,21 @@ using BaseOptionsProto = ::mediapipe::tasks::core::proto::BaseOptions;
|
|||
if (self.modelAssetPath) {
|
||||
baseOptionsProto->mutable_model_asset()->set_file_name(self.modelAssetPath.UTF8String);
|
||||
}
|
||||
if (self.delegateType != MPPBaseOptionsDelegateType_Unknown) {
|
||||
switch (self.delegateType) {
|
||||
case MPPBaseOptionsDelegateType_Cpu:
|
||||
baseOptionsProto->mutable_acceleration()->mutable_tflite();
|
||||
break;
|
||||
case MPPBaseOptionsDelegateType_Gpu:
|
||||
baseOptionsProto->mutable_acceleration()->mutable_gpu()->set_use_advanced_gpu_api(true);
|
||||
break;
|
||||
case MPPBaseOptionsDelegateType_Tpu:
|
||||
baseOptionsProto->mutable_acceleration()->mutable_nnapi()->set_accelerator_name("google-edgetpu");
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@end
|
||||
|
|
|
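A hedged sketch of how the new delegateType field is intended to be set from task options; the property names come from the headers in this commit, while the GPU choice and the model path are purely illustrative:

// Illustrative only: selecting the GPU delegate, which the helper above maps to
// acceleration.gpu with use_advanced_gpu_api = true in the BaseOptions proto.
MPPPoseLandmarkerOptions *options = [[MPPPoseLandmarkerOptions alloc] init];
options.baseOptions.modelAssetPath = @"/path/to/pose_landmarker.task";  // hypothetical path
options.baseOptions.delegateType = MPPBaseOptionsDelegateType_Gpu;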
@ -1,4 +1,5 @@
|
|||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
#package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
|
|
85
mediapipe/tasks/ios/vision/pose_landmarker/BUILD
Normal file
|
@ -0,0 +1,85 @@
|
|||
# Copyright 2023 The MediaPipe Authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
objc_library(
|
||||
name = "MPPPoseLandmarkerResult",
|
||||
srcs = ["sources/MPPPoseLandmarkerResult.m"],
|
||||
hdrs = ["sources/MPPPoseLandmarkerResult.h"],
|
||||
deps = [
|
||||
"//mediapipe/tasks/ios/vision/core:MPPMask",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPLandmark",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskResult",
|
||||
],
|
||||
)
|
||||
|
||||
objc_library(
|
||||
name = "MPPPoseLandmarkerOptions",
|
||||
srcs = ["sources/MPPPoseLandmarkerOptions.m"],
|
||||
hdrs = ["sources/MPPPoseLandmarkerOptions.h"],
|
||||
deps = [
|
||||
":MPPPoseLandmarkerResult",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskOptions",
|
||||
"//mediapipe/tasks/ios/vision/core:MPPRunningMode",
|
||||
],
|
||||
)
|
||||
|
||||
objc_library(
|
||||
name = "MPPPoseLandmarksConnections",
|
||||
hdrs = ["sources/MPPPoseLandmarksConnections.h"],
|
||||
module_name = "MPPPoseLandmarksConnections",
|
||||
deps = ["//mediapipe/tasks/ios/components/containers:MPPConnection"],
|
||||
)
|
||||
|
||||
objc_library(
|
||||
name = "MPPPoseLandmarker",
|
||||
srcs = ["sources/MPPPoseLandmarker.mm"],
|
||||
hdrs = ["sources/MPPPoseLandmarker.h"],
|
||||
visibility = ["//visibility:public"],
|
||||
copts = [
|
||||
"-ObjC++",
|
||||
"-std=c++17",
|
||||
"-x objective-c++",
|
||||
],
|
||||
module_name = "MPPPoseLandmarker",
|
||||
deps = [
|
||||
":MPPPoseLandmarkerOptions",
|
||||
":MPPPoseLandmarkerResult",
|
||||
":MPPPoseLandmarksConnections",
|
||||
"//mediapipe/tasks/cc/vision/pose_landmarker:pose_landmarker_graph",
|
||||
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
|
||||
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
|
||||
"//mediapipe/tasks/ios/components/containers:MPPConnection",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskInfo",
|
||||
"//mediapipe/tasks/ios/vision/core:MPPImage",
|
||||
"//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
|
||||
"//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker/utils:MPPPoseLandmarkerOptionsHelpers",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker/utils:MPPPoseLandmarkerResultHelpers",
|
||||
],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "PoseLandmarkerHeaders",
|
||||
srcs = [
|
||||
"sources/MPPPoseLandmarker.h",
|
||||
"sources/MPPPoseLandmarkerResult.h",
|
||||
"sources/MPPPoseLandmarkerOptions.h",
|
||||
"sources/MPPPoseLandmarksConnections.h",
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
|
@ -0,0 +1,16 @@
|
|||
//
|
||||
// MPPPoseLandmarker.h
|
||||
// _idx_FaceEffectAppLibrary_956B4690_ios_min12.0
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface MPPPoseLandmarker : NSObject
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
|
@ -0,0 +1,12 @@
|
|||
//
|
||||
// MPPPoseLandmarker.m
|
||||
// _idx_FaceEffectAppLibrary_956B4690_ios_min12.0
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#import "MPPPoseLandmarker.h"
|
||||
|
||||
@implementation MPPPoseLandmarker
|
||||
|
||||
@end
|
|
@ -0,0 +1,16 @@
|
|||
//
|
||||
// MPPPoseLandmarkerOptions.h
|
||||
// _idx_FaceEffectAppLibrary_956B4690_ios_min12.0
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface MPPPoseLandmarkerOptions : MPPTaskOptions
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
|
@ -0,0 +1,12 @@
|
|||
//
|
||||
// MPPPoseLandmarkerOptions.m
|
||||
// _idx_FaceEffectAppLibrary_956B4690_ios_min12.0
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#import "MPPPoseLandmarkerOptions.h"
|
||||
|
||||
@implementation MPPPoseLandmarkerOptions
|
||||
|
||||
@end
|
|
@ -0,0 +1,16 @@
|
|||
//
|
||||
// MPPPoseLandmarkerResult.h
|
||||
// _idx_FaceEffectAppLibrary_956B4690_ios_min12.0
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface MPPPoseLandmarkerResult : MPPTaskResult
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
|
@ -0,0 +1,12 @@
|
|||
//
|
||||
// MPPPoseLandmarkerResult.m
|
||||
// _idx_FaceEffectAppLibrary_956B4690_ios_min12.0
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#import "MPPPoseLandmarkerResult.h"
|
||||
|
||||
@implementation MPPPoseLandmarkerResult
|
||||
|
||||
@end
|
|
@ -0,0 +1,12 @@
|
|||
//
|
||||
// MPPPoseLandmarksConnections.h
|
||||
// Mediapipe
|
||||
//
|
||||
// Created by zhuzhiwen on 2023/7/27.
|
||||
//
|
||||
|
||||
#ifndef MPPPoseLandmarksConnections_h
|
||||
#define MPPPoseLandmarksConnections_h
|
||||
|
||||
|
||||
#endif /* MPPPoseLandmarksConnections_h */
|
|
@ -0,0 +1,163 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
|
||||
#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerOptions.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/**
|
||||
* @brief Performs pose landmarks detection on images.
|
||||
*
|
||||
* This API expects a pre-trained pose landmarks model asset bundle.
|
||||
*/
|
||||
NS_SWIFT_NAME(PoseLandmarker)
|
||||
@interface MPPPoseLandmarker : NSObject
|
||||
|
||||
/** The array of connections between all the landmarks in the pose. */
|
||||
@property (class, nonatomic, readonly) NSArray<MPPConnection *> *poseLandmarksConnections;
|
||||
|
||||
/**
|
||||
* Creates a new instance of `MPPPoseLandmarker` from an absolute path to a model asset bundle
|
||||
* stored locally on the device and the default `MPPPoseLandmarkerOptions`.
|
||||
*
|
||||
* @param modelPath An absolute path to a model asset bundle stored locally on the device.
|
||||
* @param error An optional error parameter populated when there is an error in initializing the
|
||||
* pose landmarker.
|
||||
*
|
||||
* @return A new instance of `MPPPoseLandmarker` with the given model path. `nil` if there is an
|
||||
* error in initializing the pose landmarker.
|
||||
*/
|
||||
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
|
||||
|
||||
/**
|
||||
* Creates a new instance of `MPPPoseLandmarker` from the given `MPPPoseLandmarkerOptions`.
|
||||
*
|
||||
* @param options The options of type `MPPPoseLandmarkerOptions` to use for configuring the
|
||||
* `MPPPoseLandmarker`.
|
||||
* @param error An optional error parameter populated when there is an error in initializing the
|
||||
* pose landmarker.
|
||||
*
|
||||
* @return A new instance of `MPPPoseLandmarker` with the given options. `nil` if there is an
|
||||
* error in initializing the pose landmarker.
|
||||
*/
|
||||
- (nullable instancetype)initWithOptions:(MPPPoseLandmarkerOptions *)options
|
||||
error:(NSError **)error NS_DESIGNATED_INITIALIZER;
|
||||
|
||||
/**
|
||||
* Performs pose landmarks detection on the provided `MPPImage` using the whole image as region of
|
||||
* interest. Rotation will be applied according to the `orientation` property of the provided
|
||||
* `MPPImage`. Only use this method when the `MPPPoseLandmarker` is created with
|
||||
* `MPPRunningModeImage`.
|
||||
*
|
||||
* This method supports performing pose landmarks detection on RGBA images. If your `MPPImage` has a
|
||||
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
|
||||
* underlying pixel buffer must have one of the following pixel format types:
|
||||
* 1. kCVPixelFormatType_32BGRA
|
||||
* 2. kCVPixelFormatType_32RGBA
|
||||
*
|
||||
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
|
||||
* RGB with an Alpha channel.
|
||||
*
|
||||
* @param image The `MPPImage` on which pose landmarks detection is to be performed.
|
||||
* @param error An optional error parameter populated when there is an error in performing pose
|
||||
* landmarks detection on the input image.
|
||||
*
|
||||
* @return An `MPPPoseLandmarkerResult` object that contains the pose landmarks detection
|
||||
* results.
|
||||
*/
|
||||
- (nullable MPPPoseLandmarkerResult *)detectInImage:(MPPImage *)image
|
||||
error:(NSError **)error NS_SWIFT_NAME(detect(image:));
|
||||
|
||||
/**
|
||||
* Performs pose landmarks detection on the provided video frame of type `MPPImage` using the whole
|
||||
* image as region of interest. Rotation will be applied according to the `orientation` property of
|
||||
* the provided `MPPImage`. Only use this method when the `MPPPoseLandmarker` is created with
|
||||
* `MPPRunningModeVideo`.
|
||||
*
|
||||
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
|
||||
* be monotonically increasing.
|
||||
*
|
||||
* This method supports performing pose landmarks detection on RGBA images. If your `MPPImage` has a
|
||||
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
|
||||
* underlying pixel buffer must have one of the following pixel format types:
|
||||
* 1. kCVPixelFormatType_32BGRA
|
||||
* 2. kCVPixelFormatType_32RGBA
|
||||
*
|
||||
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
|
||||
* RGB with an Alpha channel.
|
||||
*
|
||||
* @param image The `MPPImage` on which pose landmarks detection is to be performed.
|
||||
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
|
||||
* timestamps must be monotonically increasing.
|
||||
* @param error An optional error parameter populated when there is an error in performing pose
|
||||
* landmarks detection on the input video frame.
|
||||
*
|
||||
* @return An `MPPPoseLandmarkerResult` object that contains the pose landmarks detection
|
||||
* results.
|
||||
*/
|
||||
- (nullable MPPPoseLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
|
||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||
error:(NSError **)error NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
|
||||
|
||||
/**
|
||||
* Sends live stream image data of type `MPPImage` to perform pose landmarks detection using the
|
||||
* whole image as region of interest. Rotation will be applied according to the `orientation`
|
||||
* property of the provided `MPPImage`. Only use this method when the `MPPPoseLandmarker` is created
|
||||
* with `MPPRunningModeLiveStream`.
|
||||
*
|
||||
* The object which needs to be continuously notified of the available results of pose landmarks
|
||||
* detection must conform to the `MPPPoseLandmarkerLiveStreamDelegate` protocol and implement the
|
||||
* `poseLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:`
|
||||
* delegate method.
|
||||
*
|
||||
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
|
||||
* to the pose landmarker. The input timestamps must be monotonically increasing.
|
||||
*
|
||||
* This method supports performing pose landmarks detection on RGBA images. If your `MPPImage` has a
|
||||
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
|
||||
* underlying pixel buffer must have one of the following pixel format types:
|
||||
* 1. kCVPixelFormatType_32BGRA
|
||||
* 2. kCVPixelFormatType_32RGBA
|
||||
*
|
||||
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
|
||||
* space is RGB with an Alpha channel.
|
||||
*
|
||||
* If this method is used for performing pose landmarks detection on live camera frames using
|
||||
* `AVFoundation`, ensure that you request `AVCaptureVideoDataOutput` to output frames in
|
||||
* `kCMPixelFormat_32RGBA` using its `videoSettings` property.
|
||||
*
|
||||
* @param image A live stream image data of type `MPPImage` on which pose landmarks detection is to
|
||||
* be performed.
|
||||
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
|
||||
* image is sent to the pose landmarker. The input timestamps must be monotonically increasing.
|
||||
* @param error An optional error parameter populated when there is an error in performing pose
|
||||
* landmarks detection on the input live stream image data.
|
||||
*
|
||||
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
|
||||
*/
|
||||
- (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
|
||||
|
||||
- (instancetype)init NS_UNAVAILABLE;
|
||||
|
||||
+ (instancetype)new NS_UNAVAILABLE;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
|
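A short usage sketch for the image running mode documented above. It assumes a UIImage named uiImage and a poseLandmarker created with the default options, and it assumes MPPImage offers an initWithUIImage:error: initializer as in the other MediaPipe vision tasks (that initializer is not part of this diff):

// Wrap a UIImage for the task API (initializer assumed from the other vision tasks).
NSError *error = nil;
MPPImage *mpImage = [[MPPImage alloc] initWithUIImage:uiImage error:&error];

// Synchronous detection; only valid when the landmarker was created with MPPRunningModeImage.
MPPPoseLandmarkerResult *result = [poseLandmarker detectInImage:mpImage error:&error];
if (!result) {
  NSLog(@"Pose landmark detection failed: %@", error);
}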
@ -0,0 +1,272 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarker.h"
|
||||
#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h"
|
||||
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
|
||||
#import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
|
||||
#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
|
||||
#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarksConnections.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerOptions+Helpers.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+Helpers.h"
|
||||
#import "mediapipe/framework/calculator_registry.h"
|
||||
|
||||
namespace {
|
||||
using ::mediapipe::NormalizedRect;
|
||||
using ::mediapipe::Packet;
|
||||
using ::mediapipe::Timestamp;
|
||||
using ::mediapipe::tasks::core::PacketMap;
|
||||
using ::mediapipe::tasks::core::PacketsCallback;
|
||||
} // namespace
|
||||
|
||||
static NSString *const kImageTag = @"IMAGE";
|
||||
static NSString *const kImageInStreamName = @"image_in";
|
||||
static NSString *const kImageOutStreamName = @"image_out";
|
||||
static NSString *const kNormRectTag = @"NORM_RECT";
|
||||
static NSString *const kNormRectInStreamName = @"norm_rect_in";
|
||||
static NSString *const kSegmentationMaskTag = @"SEGMENTATION_MASK";
|
||||
static NSString *const kSegmentationMaskStreamName = @"segmentation_mask";
|
||||
static NSString *const kNormLandmarksTag = @"NORM_LANDMARKS";
|
||||
static NSString *const kNormLandmarksStreamName = @"norm_landmarks";
|
||||
static NSString *const kWorldLandmarksTag = @"WORLD_LANDMARKS";
|
||||
static NSString *const kPoseWorldLandmarksStreamName = @"world_landmarks";
|
||||
static NSString *const kTaskGraphName =
|
||||
@"mediapipe.tasks.vision.pose_landmarker.PoseLandmarkerGraph";
|
||||
static NSString *const kTaskName = @"poseLandmarker";
|
||||
|
||||
#define InputPacketMap(imagePacket, normalizedRectPacket) \
|
||||
{ \
|
||||
{kImageInStreamName.cppString, imagePacket}, { \
|
||||
kNormRectInStreamName.cppString, normalizedRectPacket \
|
||||
} \
|
||||
}
|
||||
|
||||
@interface MPPPoseLandmarker () {
|
||||
/** iOS Vision Task Runner */
|
||||
MPPVisionTaskRunner *_visionTaskRunner;
|
||||
dispatch_queue_t _callbackQueue;
|
||||
BOOL _outputSegmentationMasks;
|
||||
}
|
||||
@property(nonatomic, weak) id<MPPPoseLandmarkerLiveStreamDelegate> poseLandmarkerLiveStreamDelegate;
|
||||
@end
|
||||
|
||||
@implementation MPPPoseLandmarker
|
||||
|
||||
|
||||
- (nullable MPPPoseLandmarkerResult *)poseLandmarkerResultWithOutputPacketMap:
|
||||
(PacketMap &)outputPacketMap {
|
||||
  return [MPPPoseLandmarkerResult
      poseLandmarkerResultWithLandmarksPacket:outputPacketMap[kNormLandmarksStreamName.cppString]
                         worldLandmarksPacket:outputPacketMap[kPoseWorldLandmarksStreamName.cppString]
                      segmentationMasksPacket:outputPacketMap[kSegmentationMaskStreamName.cppString]
                     shouldCopyMaskPacketData:YES];
|
||||
}
|
||||
|
||||
- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
|
||||
if (![self.poseLandmarkerLiveStreamDelegate
|
||||
respondsToSelector:@selector(poseLandmarker:
|
||||
didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
|
||||
return;
|
||||
}
|
||||
|
||||
NSError *callbackError = nil;
|
||||
if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
|
||||
dispatch_async(_callbackQueue, ^{
|
||||
[self.poseLandmarkerLiveStreamDelegate poseLandmarker:self
|
||||
didFinishDetectionWithResult:nil
|
||||
timestampInMilliseconds:Timestamp::Unset().Value()
|
||||
error:callbackError];
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
PacketMap &outputPacketMap = liveStreamResult.value();
|
||||
if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
MPPPoseLandmarkerResult *result = [self poseLandmarkerResultWithOutputPacketMap:outputPacketMap];
|
||||
|
||||
NSInteger timeStampInMilliseconds =
|
||||
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
|
||||
kMicroSecondsPerMilliSecond;
|
||||
dispatch_async(_callbackQueue, ^{
|
||||
[self.poseLandmarkerLiveStreamDelegate poseLandmarker:self
|
||||
didFinishDetectionWithResult:result
|
||||
timestampInMilliseconds:timeStampInMilliseconds
|
||||
error:callbackError];
|
||||
});
|
||||
}
|
||||
|
||||
- (instancetype)initWithOptions:(MPPPoseLandmarkerOptions *)options error:(NSError **)error {
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_outputSegmentationMasks = options.outputSegmentationMasks;
|
||||
NSMutableArray *outputStreams = [NSMutableArray arrayWithArray:@[
|
||||
[NSString stringWithFormat:@"%@:%@", kNormLandmarksTag, kNormLandmarksStreamName],
|
||||
[NSString
|
||||
stringWithFormat:@"%@:%@", kWorldLandmarksTag, kPoseWorldLandmarksStreamName],
|
||||
[NSString stringWithFormat:@"%@:%@", kImageTag, kImageOutStreamName]
|
||||
]];
|
||||
if (options.outputSegmentationMasks) {
|
||||
[outputStreams addObject:[NSString stringWithFormat:@"%@:%@", kSegmentationMaskTag, kSegmentationMaskStreamName]];
|
||||
}
|
||||
MPPTaskInfo *taskInfo = [[MPPTaskInfo alloc]
|
||||
initWithTaskGraphName:kTaskGraphName
|
||||
inputStreams:@[
|
||||
[NSString stringWithFormat:@"%@:%@", kImageTag, kImageInStreamName],
|
||||
[NSString stringWithFormat:@"%@:%@", kNormRectTag, kNormRectInStreamName]
|
||||
]
|
||||
outputStreams:outputStreams
|
||||
taskOptions:options
|
||||
enableFlowLimiting:options.runningMode == MPPRunningModeLiveStream
|
||||
error:error];
|
||||
|
||||
if (!taskInfo) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
PacketsCallback packetsCallback = nullptr;
|
||||
|
||||
if (options.poseLandmarkerLiveStreamDelegate) {
|
||||
_poseLandmarkerLiveStreamDelegate = options.poseLandmarkerLiveStreamDelegate;
|
||||
|
||||
// Create a private serial dispatch queue in which the delegate method will be called
|
||||
// asynchronously. This is to ensure that if the client performs a long running operation in
|
||||
// the delegate method, the queue on which the C++ callback is invoked is not blocked and is
|
||||
// freed up to continue with its operations.
|
||||
_callbackQueue = dispatch_queue_create(
|
||||
[MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL);
|
||||
|
||||
// Capturing `self` as weak in order to avoid `self` being kept in memory
|
||||
// and causing a retain cycle after `self` is set to `nil`.
|
||||
MPPPoseLandmarker *__weak weakSelf = self;
|
||||
packetsCallback = [=](absl::StatusOr<PacketMap> liveStreamResult) {
|
||||
[weakSelf processLiveStreamResult:liveStreamResult];
|
||||
};
|
||||
}
|
||||
|
||||
_visionTaskRunner =
|
||||
[[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
|
||||
runningMode:options.runningMode
|
||||
packetsCallback:std::move(packetsCallback)
|
||||
error:error];
|
||||
if (!_visionTaskRunner) {
|
||||
return nil;
|
||||
}
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error {
|
||||
MPPPoseLandmarkerOptions *options = [[MPPPoseLandmarkerOptions alloc] init];
|
||||
|
||||
options.baseOptions.modelAssetPath = modelPath;
|
||||
|
||||
return [self initWithOptions:options error:error];
|
||||
}
|
||||
|
||||
- (nullable MPPPoseLandmarkerResult *)poseLandmarkerResultWithOptionalOutputPacketMap:
|
||||
(std::optional<PacketMap> &)outputPacketMap {
|
||||
if (!outputPacketMap.has_value()) {
|
||||
return nil;
|
||||
}
|
||||
MPPPoseLandmarkerResult *result =
|
||||
[self poseLandmarkerResultWithOutputPacketMap:outputPacketMap.value()];
|
||||
return result;
|
||||
}
|
||||
|
||||
- (nullable MPPPoseLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
|
||||
std::optional<NormalizedRect> rect =
|
||||
[_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
|
||||
imageSize:CGSizeMake(image.width, image.height)
|
||||
error:error];
|
||||
if (!rect.has_value()) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
|
||||
if (imagePacket.IsEmpty()) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
Packet normalizedRectPacket =
|
||||
[MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
|
||||
|
||||
PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
|
||||
|
||||
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
|
||||
error:error];
|
||||
return [self poseLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
|
||||
}
|
||||
|
||||
- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
|
||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||
error:(NSError **)error {
|
||||
std::optional<NormalizedRect> rect =
|
||||
[_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
|
||||
imageSize:CGSizeMake(image.width, image.height)
|
||||
error:error];
|
||||
if (!rect.has_value()) {
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
|
||||
timestampInMilliseconds:timestampInMilliseconds
|
||||
error:error];
|
||||
if (imagePacket.IsEmpty()) {
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
Packet normalizedRectPacket =
|
||||
[MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
|
||||
timestampInMilliseconds:timestampInMilliseconds];
|
||||
|
||||
PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
|
||||
return inputPacketMap;
|
||||
}
|
||||
|
||||
- (nullable MPPPoseLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
|
||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||
error:(NSError **)error {
|
||||
std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
|
||||
timestampInMilliseconds:timestampInMilliseconds
|
||||
error:error];
|
||||
if (!inputPacketMap.has_value()) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
std::optional<PacketMap> outputPacketMap =
|
||||
[_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
|
||||
|
||||
return [self poseLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
|
||||
}
|
||||
|
||||
- (BOOL)detectAsyncInImage:(MPPImage *)image
|
||||
   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||
                     error:(NSError **)error {
|
||||
std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
|
||||
timestampInMilliseconds:timestampInMilliseconds
|
||||
error:error];
|
||||
if (!inputPacketMap.has_value()) {
|
||||
return NO;
|
||||
}
|
||||
|
||||
return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
|
||||
}
|
||||
|
||||
+ (NSArray<MPPConnection *> *)poseLandmarksConnections {
|
||||
return MPPPoseLandmarksConnections;
|
||||
}
|
||||
|
||||
@end
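
// Example (illustrative, not part of this commit): creating a pose landmarker and running
// single-image detection with the API implemented above. The model file name, the bundle lookup
// and the `UIImage`-based `MPPImage` initializer are assumptions.
MPPPoseLandmarkerOptions *exampleOptions = [[MPPPoseLandmarkerOptions alloc] init];
exampleOptions.baseOptions.modelAssetPath =
    [[NSBundle mainBundle] pathForResource:@"pose_landmarker" ofType:@"task"];
exampleOptions.runningMode = MPPRunningModeImage;

NSError *exampleError = nil;
MPPPoseLandmarker *exampleLandmarker =
    [[MPPPoseLandmarker alloc] initWithOptions:exampleOptions error:&exampleError];

// `sourceImage` is assumed to be a UIImage obtained elsewhere (camera roll, asset catalog, ...).
MPPImage *exampleImage = [[MPPImage alloc] initWithUIImage:sourceImage error:&exampleError];
MPPPoseLandmarkerResult *exampleResult = [exampleLandmarker detectInImage:exampleImage
                                                                    error:&exampleError];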
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import "mediapipe/tasks/ios/core/sources/MPPTaskOptions.h"
|
||||
#import "mediapipe/tasks/ios/vision/core/sources/MPPRunningMode.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@class MPPPoseLandmarker;
|
||||
|
||||
/**
|
||||
* This protocol defines an interface for the delegates of `MPPPoseLandmarker` object to receive
|
||||
* results of performing asynchronous pose landmark detection on images (i.e., when `runningMode` =
|
||||
* `MPPRunningModeLiveStream`).
|
||||
*
|
||||
* The delegate of `MPPPoseLandmarker` must adopt `MPPPoseLandmarkerLiveStreamDelegate` protocol.
|
||||
* The methods in this protocol are optional.
|
||||
*/
|
||||
NS_SWIFT_NAME(PoseLandmarkerLiveStreamDelegate)
|
||||
@protocol MPPPoseLandmarkerLiveStreamDelegate <NSObject>
|
||||
|
||||
@optional
|
||||
|
||||
/**
|
||||
* This method notifies a delegate that the results of asynchronous pose landmark detection of an
|
||||
* image submitted to the `MPPPoseLandmarker` are available.
|
||||
*
|
||||
* This method is called on a private serial dispatch queue created by the `MPPPoseLandmarker`
|
||||
* for performing the asynchronous delegate calls.
|
||||
*
|
||||
* @param poseLandmarker The pose landmarker which performed the pose landmarking.
|
||||
* This is useful to test equality when there are multiple instances of `MPPPoseLandmarker`.
|
||||
* @param result The `MPPPoseLandmarkerResult` object that contains the lists of pose landmarks
|
||||
* detected in the input live stream image. Landmarks are expressed in the unrotated input frame
|
||||
* of reference coordinates, normalized to the dimensions of the underlying image data.
|
||||
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
|
||||
* image was sent to the pose landmarker.
|
||||
* @param error An optional error parameter populated when there is an error in performing pose
|
||||
* landmark detection on the input live stream image data.
|
||||
*/
|
||||
- (void)poseLandmarker:(MPPPoseLandmarker *)poseLandmarker
|
||||
didFinishDetectionWithResult:(nullable MPPPoseLandmarkerResult *)result
|
||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||
error:(nullable NSError *)error
|
||||
NS_SWIFT_NAME(poseLandmarker(_:didFinishDetection:timestampInMilliseconds:error:));
|
||||
@end
|
||||
|
||||
/** Options for setting up a `MPPPoseLandmarker`. */
|
||||
NS_SWIFT_NAME(PoseLandmarkerOptions)
|
||||
@interface MPPPoseLandmarkerOptions : MPPTaskOptions <NSCopying>
|
||||
|
||||
/**
|
||||
* Running mode of the pose landmarker task. Defaults to `MPPRunningModeImage`.
|
||||
* `MPPPoseLandmarker` can be created with one of the following running modes:
|
||||
* 1. `MPPRunningModeImage`: The mode for performing pose landmark detection on single image
|
||||
* inputs.
|
||||
* 2. `MPPRunningModeVideo`: The mode for performing pose landmark detection on the decoded frames
|
||||
* of a video.
|
||||
* 3. `MPPRunningModeLiveStream`: The mode for performing pose landmark detection on a live stream
|
||||
* of input data, such as from the camera.
|
||||
*/
|
||||
@property(nonatomic) MPPRunningMode runningMode;
|
||||
|
||||
/**
|
||||
* An object that conforms to the `MPPPoseLandmarkerLiveStreamDelegate` protocol. This object must
|
||||
* implement `poseLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` to
|
||||
* receive the results of performing asynchronous pose landmark detection on images (i.e., when
|
||||
* `runningMode` = `MPPRunningModeLiveStream`).
|
||||
*/
|
||||
@property(nonatomic, weak, nullable) id<MPPPoseLandmarkerLiveStreamDelegate>
|
||||
poseLandmarkerLiveStreamDelegate;
|
||||
|
||||
/** The maximum number of poses that can be detected by the `MPPPoseLandmarker`. */
|
||||
@property(nonatomic) NSInteger numPoses;
|
||||
|
||||
/** The minimum confidence score for the pose detection to be considered successful. */
|
||||
@property(nonatomic) float minPoseDetectionConfidence;
|
||||
|
||||
/** The minimum confidence score of pose presence in the pose landmark detection. */
|
||||
@property(nonatomic) float minPosePresenceConfidence;
|
||||
|
||||
/** The minimum confidence score for the pose tracking to be considered successful. */
|
||||
@property(nonatomic) float minTrackingConfidence;
|
||||
|
||||
/** Whether to output segmentation masks. Defaults to `NO`. */
|
||||
@property(nonatomic) BOOL outputSegmentationMasks;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
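
// Example (illustrative, not part of this commit): configuring the options above for live stream
// detection. The view controller name and model path are placeholders; the delegate method
// signature matches the protocol declared in this header.
@interface ExamplePoseViewController : UIViewController <MPPPoseLandmarkerLiveStreamDelegate>
@end

@implementation ExamplePoseViewController

- (void)poseLandmarker:(MPPPoseLandmarker *)poseLandmarker
    didFinishDetectionWithResult:(nullable MPPPoseLandmarkerResult *)result
         timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                           error:(nullable NSError *)error {
  // Invoked on the landmarker's private serial queue; dispatch to the main queue before
  // updating any UI.
}

- (MPPPoseLandmarkerOptions *)liveStreamOptions {
  MPPPoseLandmarkerOptions *options = [[MPPPoseLandmarkerOptions alloc] init];
  options.baseOptions.modelAssetPath = @"pose_landmarker.task";  // placeholder path
  options.runningMode = MPPRunningModeLiveStream;
  options.poseLandmarkerLiveStreamDelegate = self;
  options.outputSegmentationMasks = YES;
  return options;
}

@end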
|
|
@ -0,0 +1,44 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerOptions.h"
|
||||
|
||||
@implementation MPPPoseLandmarkerOptions
|
||||
|
||||
- (instancetype)init {
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_numPoses = 1;
|
||||
_minPoseDetectionConfidence = 0.5f;
|
||||
_minPosePresenceConfidence = 0.5f;
|
||||
_minTrackingConfidence = 0.5f;
|
||||
_outputSegmentationMasks = NO;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (id)copyWithZone:(NSZone *)zone {
|
||||
MPPPoseLandmarkerOptions *poseLandmarkerOptions = [super copyWithZone:zone];
|
||||
|
||||
poseLandmarkerOptions.runningMode = self.runningMode;
|
||||
poseLandmarkerOptions.poseLandmarkerLiveStreamDelegate = self.poseLandmarkerLiveStreamDelegate;
|
||||
poseLandmarkerOptions.numPoses = self.numPoses;
|
||||
poseLandmarkerOptions.minPoseDetectionConfidence = self.minPoseDetectionConfidence;
|
||||
poseLandmarkerOptions.minPosePresenceConfidence = self.minPosePresenceConfidence;
|
||||
poseLandmarkerOptions.minTrackingConfidence = self.minTrackingConfidence;
|
||||
poseLandmarkerOptions.outputSegmentationMasks = self.outputSegmentationMasks;
|
||||
return poseLandmarkerOptions;
|
||||
}
|
||||
|
||||
@end
|
|
@ -0,0 +1,61 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h"
|
||||
#import "mediapipe/tasks/ios/core/sources/MPPTaskResult.h"
|
||||
#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/** Represents the pose landmarker results generated by `MPPPoseLandmarker`. */
|
||||
NS_SWIFT_NAME(PoseLandmarkerResult)
|
||||
@interface MPPPoseLandmarkerResult : MPPTaskResult
|
||||
|
||||
/** Pose landmarks of detected poses. */
|
||||
@property(nonatomic, readonly) NSArray<NSArray<MPPNormalizedLandmark *> *> *landmarks;
|
||||
|
||||
/** Pose landmarks in world coordinates of detected poses. */
|
||||
@property(nonatomic, readonly) NSArray<NSArray<MPPLandmark *> *> *worldLandmarks;
|
||||
|
||||
/**
|
||||
* An optional array of `MPPMask` objects, one segmentation mask per detected pose. Each `MPPMask`
|
||||
* holds a 32-bit float array of size `image width * image height` in which every element is the
|
||||
* confidence, usually in the range [0,1], with which the model predicted that the corresponding
|
||||
* pixel belongs to the detected pose.
|
||||
*/
|
||||
@property(nonatomic, readonly, nullable) NSArray<MPPMask *> *segmentationMasks;
|
||||
|
||||
/**
|
||||
* Initializes a new `MPPPoseLandmarkerResult` with the given landmarks, world landmarks,
|
||||
* segmentation masks and timestamp (in milliseconds).
|
||||
*
|
||||
* @param landmarks The pose landmarks of detected poses.
|
||||
* @param worldLandmarks The pose landmarks in world coordinates of detected poses.
|
||||
* @param segmentationMasks The segmentation masks of detected poses.
|
||||
* @param timestampInMilliseconds The timestamp for this result.
|
||||
*
|
||||
* @return An instance of `MPPPoseLandmarkerResult` initialized with the given landmarks, world
|
||||
* landmarks, segmentation masks and timestamp (in milliseconds).
|
||||
*
|
||||
*/
|
||||
- (instancetype)initWithLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)landmarks
|
||||
worldLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)worldLandmarks
|
||||
segmentationMasks:(nullable NSArray<MPPMask *> *)segmentationMasks
|
||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
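
// Example (illustrative, not part of this commit): reading the result fields declared above,
// assuming `result` is a non-nil `MPPPoseLandmarkerResult`.
for (NSArray<MPPNormalizedLandmark *> *poseLandmarks in result.landmarks) {
  for (MPPNormalizedLandmark *landmark in poseLandmarks) {
    // x and y are normalized to the dimensions of the input image.
    NSLog(@"landmark: (%.3f, %.3f, %.3f)", landmark.x, landmark.y, landmark.z);
  }
}

// `segmentationMasks` is non-nil only when `outputSegmentationMasks` was enabled in the options.
for (MPPMask *mask in result.segmentationMasks) {
  NSLog(@"segmentation mask: %ld x %ld", (long)mask.width, (long)mask.height);
}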
|
|
@ -0,0 +1,32 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
|
||||
|
||||
@implementation MPPPoseLandmarkerResult
|
||||
|
||||
- (instancetype)initWithLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)landmarks
|
||||
worldLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)worldLandmarks
|
||||
segmentationMasks:(nullable NSArray<MPPMask *> *)segmentationMasks
|
||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
|
||||
self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
|
||||
if (self) {
|
||||
_landmarks = landmarks;
|
||||
_worldLandmarks = worldLandmarks;
|
||||
_segmentationMasks = segmentationMasks;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
|
@ -0,0 +1,45 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef MPPPoseLandmarksConnections_h
|
||||
#define MPPPoseLandmarksConnections_h
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
NSArray<MPPConnection *> *const MPPPoseLandmarksConnections = @[
|
||||
[[MPPConnection alloc] initWithStart:1 end:2], [[MPPConnection alloc] initWithStart:0 end:1],
|
||||
[[MPPConnection alloc] initWithStart:2 end:3], [[MPPConnection alloc] initWithStart:3 end:7],
|
||||
[[MPPConnection alloc] initWithStart:0 end:4], [[MPPConnection alloc] initWithStart:4 end:5],
|
||||
[[MPPConnection alloc] initWithStart:5 end:6], [[MPPConnection alloc] initWithStart:6 end:8],
|
||||
[[MPPConnection alloc] initWithStart:9 end:10], [[MPPConnection alloc] initWithStart:11 end:12],
|
||||
[[MPPConnection alloc] initWithStart:11 end:13], [[MPPConnection alloc] initWithStart:13 end:15],
|
||||
[[MPPConnection alloc] initWithStart:15 end:17], [[MPPConnection alloc] initWithStart:15 end:19],
|
||||
[[MPPConnection alloc] initWithStart:15 end:21], [[MPPConnection alloc] initWithStart:17 end:19],
|
||||
[[MPPConnection alloc] initWithStart:12 end:14], [[MPPConnection alloc] initWithStart:14 end:16],
|
||||
[[MPPConnection alloc] initWithStart:16 end:18], [[MPPConnection alloc] initWithStart:16 end:20],
|
||||
[[MPPConnection alloc] initWithStart:16 end:22], [[MPPConnection alloc] initWithStart:18 end:20],
|
||||
[[MPPConnection alloc] initWithStart:11 end:23], [[MPPConnection alloc] initWithStart:12 end:24],
|
||||
[[MPPConnection alloc] initWithStart:23 end:24], [[MPPConnection alloc] initWithStart:23 end:25],
|
||||
[[MPPConnection alloc] initWithStart:24 end:26], [[MPPConnection alloc] initWithStart:25 end:27],
|
||||
[[MPPConnection alloc] initWithStart:26 end:28], [[MPPConnection alloc] initWithStart:27 end:29],
|
||||
[[MPPConnection alloc] initWithStart:28 end:30], [[MPPConnection alloc] initWithStart:29 end:31],
|
||||
[[MPPConnection alloc] initWithStart:30 end:32], [[MPPConnection alloc] initWithStart:27 end:31]
|
||||
];
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
|
||||
#endif /* MPPPoseLandmarksConnections_h */
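
// Example (illustrative, not part of this commit): using the connection table above to compute
// the line segments of one detected pose. `poseLandmarks` is assumed to be one element of
// `result.landmarks`, and `imageSize` the size of the input image in pixels.
for (MPPConnection *connection in MPPPoseLandmarksConnections) {
  MPPNormalizedLandmark *start = poseLandmarks[connection.start];
  MPPNormalizedLandmark *end = poseLandmarks[connection.end];
  CGPoint from = CGPointMake(start.x * imageSize.width, start.y * imageSize.height);
  CGPoint to = CGPointMake(end.x * imageSize.width, end.y * imageSize.height);
  // Draw a segment from `from` to `to` with the rendering API of your choice.
}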
|
47
mediapipe/tasks/ios/vision/pose_landmarker/utils/BUILD
Normal file
|
@ -0,0 +1,47 @@
|
|||
# Copyright 2023 The MediaPipe Authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
objc_library(
|
||||
name = "MPPPoseLandmarkerOptionsHelpers",
|
||||
srcs = ["sources/MPPPoseLandmarkerOptions+Helpers.mm"],
|
||||
hdrs = ["sources/MPPPoseLandmarkerOptions+Helpers.h"],
|
||||
deps = [
|
||||
"//mediapipe/framework:calculator_options_cc_proto",
|
||||
"//mediapipe/tasks/cc/vision/pose_detector/proto:pose_detector_graph_options_cc_proto",
|
||||
"//mediapipe/tasks/cc/vision/pose_landmarker/proto:pose_landmarker_graph_options_cc_proto",
|
||||
"//mediapipe/tasks/cc/vision/pose_landmarker/proto:pose_landmarks_detector_graph_options_cc_proto",
|
||||
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
|
||||
"//mediapipe/tasks/ios/core:MPPTaskOptionsProtocol",
|
||||
"//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarkerOptions",
|
||||
],
|
||||
)
|
||||
|
||||
objc_library(
|
||||
name = "MPPPoseLandmarkerResultHelpers",
|
||||
srcs = ["sources/MPPPoseLandmarkerResult+Helpers.mm"],
|
||||
hdrs = ["sources/MPPPoseLandmarkerResult+Helpers.h"],
|
||||
deps = [
|
||||
"//mediapipe/framework:packet",
|
||||
"//mediapipe/framework/formats:image",
|
||||
"//mediapipe/framework/formats:landmark_cc_proto",
|
||||
"//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers",
|
||||
"//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers",
|
||||
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarkerResult",
|
||||
],
|
||||
)
|
|
@ -0,0 +1,32 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "mediapipe/framework/calculator_options.pb.h"
|
||||
#import "mediapipe/tasks/ios/core/sources/MPPTaskOptionsProtocol.h"
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerOptions.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface MPPPoseLandmarkerOptions (Helpers) <MPPTaskOptionsProtocol>
|
||||
|
||||
/**
|
||||
* Populates the provided `CalculatorOptions` proto container with the current settings.
|
||||
*
|
||||
* @param optionsProto The `CalculatorOptions` proto object to copy the settings to.
|
||||
*/
|
||||
- (void)copyToProto:(::mediapipe::CalculatorOptions *)optionsProto;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
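
// Example (illustrative, not part of this commit): populating a CalculatorOptions proto via the
// category declared above. `poseLandmarkerOptions` is assumed to be a configured
// `MPPPoseLandmarkerOptions` instance.
mediapipe::CalculatorOptions calculatorOptions;
[poseLandmarkerOptions copyToProto:&calculatorOptions];
// The proto now carries the PoseLandmarkerGraphOptions extension derived from the options object.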
|
|
@ -0,0 +1,56 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerOptions+Helpers.h"
|
||||
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
|
||||
#import "mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.h"
|
||||
#include "mediapipe/tasks/cc/vision/pose_detector/proto/pose_detector_graph_options.pb.h"
|
||||
#include "mediapipe/tasks/cc/vision/pose_landmarker/proto/pose_landmarker_graph_options.pb.h"
|
||||
#include "mediapipe/tasks/cc/vision/pose_landmarker/proto/pose_landmarks_detector_graph_options.pb.h"
|
||||
|
||||
namespace {
|
||||
using CalculatorOptionsProto = mediapipe::CalculatorOptions;
|
||||
using PoseLandmarkerGraphOptionsProto =
|
||||
::mediapipe::tasks::vision::pose_landmarker::proto::PoseLandmarkerGraphOptions;
|
||||
using PoseDetectorGraphOptionsProto =
|
||||
::mediapipe::tasks::vision::pose_detector::proto::PoseDetectorGraphOptions;
|
||||
using PoseLandmarksDetectorGraphOptionsProto =
|
||||
::mediapipe::tasks::vision::pose_landmarker::proto::PoseLandmarksDetectorGraphOptions;
|
||||
} // namespace
|
||||
|
||||
@implementation MPPPoseLandmarkerOptions (Helpers)
|
||||
|
||||
- (void)copyToProto:(CalculatorOptionsProto *)optionsProto {
|
||||
PoseLandmarkerGraphOptionsProto *poseLandmarkerGraphOptionsProto =
|
||||
optionsProto
|
||||
->MutableExtension(PoseLandmarkerGraphOptionsProto::ext);
|
||||
poseLandmarkerGraphOptionsProto->Clear();
|
||||
|
||||
[self.baseOptions copyToProto:poseLandmarkerGraphOptionsProto->mutable_base_options()
|
||||
withUseStreamMode:self.runningMode != MPPRunningModeImage];
|
||||
|
||||
poseLandmarkerGraphOptionsProto->set_min_tracking_confidence(self.minTrackingConfidence);
|
||||
|
||||
PoseDetectorGraphOptionsProto *poseDetectorGraphOptionsProto =
|
||||
poseLandmarkerGraphOptionsProto->mutable_pose_detector_graph_options();
|
||||
poseDetectorGraphOptionsProto->set_num_poses(self.numPoses);
|
||||
poseDetectorGraphOptionsProto->set_min_detection_confidence(self.minPoseDetectionConfidence);
|
||||
|
||||
PoseLandmarksDetectorGraphOptionsProto *poseLandmarksDetectorGraphOptionsProto =
|
||||
poseLandmarkerGraphOptionsProto->mutable_pose_landmarks_detector_graph_options();
|
||||
poseLandmarksDetectorGraphOptionsProto->set_min_detection_confidence(
|
||||
self.minPosePresenceConfidence);
|
||||
}
|
||||
|
||||
@end
|
|
@ -0,0 +1,45 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
|
||||
|
||||
#include "mediapipe/framework/formats/image.h"
|
||||
#include "mediapipe/framework/formats/landmark.pb.h"
|
||||
#include "mediapipe/framework/packet.h"
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
static const int kMicroSecondsPerMilliSecond = 1000;
|
||||
|
||||
@interface MPPPoseLandmarkerResult (Helpers)
|
||||
|
||||
|
||||
/**
|
||||
* Creates an `MPPPoseLandmarkerResult` from the landmarks, world landmarks and segmentation
* masks packets.
|
||||
*
|
||||
* @param landmarksPacket A MediaPipe packet wrapping a `std::vector<NormalizedLandmarkListProto>`.
|
||||
* @param worldLandmarksPacket A MediaPipe packet wrapping a `std::vector<LandmarkListProto>`.
|
||||
* @param segmentationMasksPacket A MediaPipe packet wrapping a `std::vector<ImageProto>`.
|
||||
*
|
||||
* @return An `MPPPoseLandmarkerResult` object that contains the pose landmark detection
|
||||
* results.
|
||||
*/
|
||||
+ (MPPPoseLandmarkerResult *)poseLandmarkerResultWithLandmarksPacket:(const mediapipe::Packet &)landmarksPacket
|
||||
worldLandmarksPacket:(const mediapipe::Packet &)worldLandmarksPacket
|
||||
segmentationMasksPacket:(const mediapipe::Packet &)segmentationMasksPacket
|
||||
shouldCopyMaskPacketData:(BOOL)shouldCopyMaskPacketData;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
|
@ -0,0 +1,103 @@
|
|||
// Copyright 2023 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+Helpers.h"
|
||||
#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
|
||||
|
||||
namespace {
|
||||
using ImageProto = ::mediapipe::Image;
|
||||
using LandmarkListProto = ::mediapipe::LandmarkList;
|
||||
using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
|
||||
using ::mediapipe::Packet;
|
||||
} // namespace
|
||||
|
||||
@implementation MPPPoseLandmarkerResult (Helpers)
|
||||
|
||||
+ (MPPPoseLandmarkerResult *)emptyPoseLandmarkerResultWithTimestampInMilliseconds:
|
||||
(NSInteger)timestampInMilliseconds {
|
||||
return [[MPPPoseLandmarkerResult alloc] initWithLandmarks:@[]
|
||||
worldLandmarks:@[]
|
||||
segmentationMasks:@[]
|
||||
timestampInMilliseconds:timestampInMilliseconds];
|
||||
}
|
||||
|
||||
+ (MPPPoseLandmarkerResult *)poseLandmarkerResultWithLandmarksPacket:(const mediapipe::Packet &)landmarksPacket
|
||||
worldLandmarksPacket:(const mediapipe::Packet &)worldLandmarksPacket
|
||||
segmentationMasksPacket:(const mediapipe::Packet &)segmentationMasksPacket
|
||||
shouldCopyMaskPacketData:(BOOL)shouldCopyMaskPacketData {
|
||||
NSInteger timestampInMilliseconds =
|
||||
(NSInteger)(landmarksPacket.Timestamp().Value() / kMicroSecondsPerMilliSecond);
|
||||
|
||||
if (landmarksPacket.IsEmpty()) {
|
||||
return [MPPPoseLandmarkerResult
|
||||
emptyPoseLandmarkerResultWithTimestampInMilliseconds:timestampInMilliseconds];
|
||||
}
|
||||
if (!landmarksPacket.ValidateAsType<std::vector<NormalizedLandmarkListProto> >().ok() ||
|
||||
!worldLandmarksPacket.ValidateAsType<std::vector<LandmarkListProto> >().ok()) {
|
||||
return [MPPPoseLandmarkerResult
|
||||
emptyPoseLandmarkerResultWithTimestampInMilliseconds:timestampInMilliseconds];
|
||||
}
|
||||
|
||||
const std::vector<NormalizedLandmarkListProto> &landmarksProto =
|
||||
landmarksPacket.Get<std::vector<NormalizedLandmarkListProto>>();
|
||||
NSMutableArray<NSMutableArray<MPPNormalizedLandmark *> *> *multiPoseLandmarks =
|
||||
[NSMutableArray arrayWithCapacity:(NSUInteger)landmarksProto.size()];
|
||||
|
||||
for (const auto &landmarkListProto : landmarksProto) {
|
||||
NSMutableArray<MPPNormalizedLandmark *> *landmarks =
|
||||
[NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProto.landmark().size()];
|
||||
|
||||
for (const auto &normalizedLandmarkProto : landmarkListProto.landmark()) {
|
||||
MPPNormalizedLandmark *normalizedLandmark =
|
||||
[MPPNormalizedLandmark normalizedLandmarkWithProto:normalizedLandmarkProto];
|
||||
[landmarks addObject:normalizedLandmark];
|
||||
}
|
||||
|
||||
[multiPoseLandmarks addObject:landmarks];
|
||||
}
|
||||
|
||||
const std::vector<LandmarkListProto> &worldLandmarksProto =
|
||||
worldLandmarksPacket.Get<std::vector<LandmarkListProto>>();
|
||||
NSMutableArray<NSMutableArray<MPPLandmark *> *> *multiPoseWorldLandmarks =
|
||||
[NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarksProto.size()];
|
||||
|
||||
for (const auto &worldLandmarkListProto : worldLandmarksProto) {
|
||||
NSMutableArray<MPPLandmark *> *worldLandmarks =
|
||||
[NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarkListProto.landmark().size()];
|
||||
|
||||
for (const auto &landmarkProto : worldLandmarkListProto.landmark()) {
|
||||
MPPLandmark *landmark = [MPPLandmark landmarkWithProto:landmarkProto];
|
||||
[worldLandmarks addObject:landmark];
|
||||
}
|
||||
|
||||
[multiPoseWorldLandmarks addObject:worldLandmarks];
|
||||
}
|
||||
|
||||
NSMutableArray<MPPMask *> *multiPoseSegmentationMasksProto = [[NSMutableArray alloc] init];
|
||||
if (segmentationMasksPacket.ValidateAsType<std::vector<ImageProto> >().ok()) {
|
||||
const std::vector<ImageProto> &segmentationMasksProto = segmentationMasksPacket.Get<std::vector<ImageProto> >();
|
||||
for (const auto &imageProto : segmentationMasksProto) {
|
||||
MPPMask *segmentationMasks =
    [[MPPMask alloc] initWithFloat32Data:(float *)imageProto.GetImageFrameSharedPtr().get()->PixelData()
                                   width:imageProto.width()
                                  height:imageProto.height()
                              shouldCopy:shouldCopyMaskPacketData];
|
||||
[multiPoseSegmentationMasksProto addObject:segmentationMasks];
|
||||
}
|
||||
}
|
||||
MPPPoseLandmarkerResult *poseLandmarkerResult =
|
||||
[[MPPPoseLandmarkerResult alloc] initWithLandmarks:multiPoseLandmarks
|
||||
worldLandmarks:multiPoseWorldLandmarks
|
||||
segmentationMasks:multiPoseSegmentationMasksProto
|
||||
timestampInMilliseconds:timestampInMilliseconds];
|
||||
return poseLandmarkerResult;
|
||||
}
|
||||
|
||||
@end
|