diff --git a/WORKSPACE b/WORKSPACE
index 0639591ff..d7a833877 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -316,17 +316,23 @@ http_archive(
# iOS basic build deps.
+#http_archive(
+# name = "build_bazel_rules_apple",
+# patch_args = [
+# "-p1",
+# ],
+# patches = [
+# # Bypass checking ios unit test runner when building MP ios applications.
+# "@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff",
+# ],
+# sha256 = "77e8bf6fda706f420a55874ae6ee4df0c9d95da6c7838228b26910fc82eea5a2",
+# url = "https://github.com/bazelbuild/rules_apple/releases/download/0.32.0/rules_apple.0.32.0.tar.gz",
+#)
+
http_archive(
name = "build_bazel_rules_apple",
- patch_args = [
- "-p1",
- ],
- patches = [
- # Bypass checking ios unit test runner when building MP ios applications.
- "@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff",
- ],
- sha256 = "77e8bf6fda706f420a55874ae6ee4df0c9d95da6c7838228b26910fc82eea5a2",
- url = "https://github.com/bazelbuild/rules_apple/releases/download/0.32.0/rules_apple.0.32.0.tar.gz",
+ sha256 = "90e3b5e8ff942be134e64a83499974203ea64797fd620eddeb71b3a8e1bff681",
+ url = "https://github.com/bazelbuild/rules_apple/releases/download/1.1.2/rules_apple.1.1.2.tar.gz",
)
load(
@@ -343,6 +349,13 @@ load(
swift_rules_dependencies()
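+
+# rules_swift now splits some transitive dependencies into a separate "extras" macro; register
+# them as well (assumed to be needed alongside the rules_apple 1.1.2 upgrade above).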
+load(
+ "@build_bazel_rules_swift//swift:extras.bzl",
+ "swift_rules_extra_dependencies",
+)
+
+swift_rules_extra_dependencies()
+
http_archive(
name = "build_bazel_apple_support",
sha256 = "741366f79d900c11e11d8efd6cc6c66a31bfb2451178b58e0b5edc6f1db17b35",
@@ -515,11 +528,11 @@ load("@build_bazel_rules_android//android:rules.bzl", "android_ndk_repository",
android_sdk_repository(
name = "androidsdk",
build_tools_version = "30.0.3",
- # path = "/Users/tj/Library/Android/sdk", # Path to Android SDK, optional if $ANDROID_HOME is set
+# path = "/Users/tj/Library/Android/sdk", # Path to Android SDK, optional if $ANDROID_HOME is set
)
android_ndk_repository(
name = "androidndk", # Required. Name *must* be "androidndk".
api_level = 21,
- # path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529", # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
+# path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529", # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
)
diff --git a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
index 68b3d405b..69b95f381 100644
--- a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
+++ b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
@@ -23,7 +23,8 @@
"mediapipe/objc/testing/app/BUILD"
],
"buildTargets" : [
- "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp"
+ "//mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera:posetracking-lindera",
+ "//mediapipe/swift/solutions/lindera:lindera"
],
"optionSet" : {
"BazelBuildOptionsDebug" : {
@@ -90,7 +91,14 @@
"mediapipe/examples/ios/iristrackinggpu",
"mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectiongpu",
+ "mediapipe/examples/ios/posetracking-lindera",
+ "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera",
+ "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj",
"mediapipe/examples/ios/posetrackinggpu",
+ "mediapipe/examples/ios/posetrackingsolution-swift",
+ "mediapipe/examples/ios/posetrackingsolution-swift/Camera",
+ "mediapipe/examples/ios/posetrackingsolution-swift/ViewModels",
+ "mediapipe/examples/ios/posetrackingsolution-swift/Views",
"mediapipe/examples/ios/posetrackingsolutiongpu",
"mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj",
"mediapipe/examples/ios/selfiesegmentationgpu",
@@ -117,6 +125,9 @@
"mediapipe/objc",
"mediapipe/objc/solutions",
"mediapipe/objc/solutions/posetracking_gpu",
+ "mediapipe/swift",
+ "mediapipe/swift/solutions",
+ "mediapipe/swift/solutions/lindera",
"mediapipe/util",
"mediapipe/util/android",
"mediapipe/util/android/file",
diff --git a/mediapipe/MediaPipe.tulsiproj/project.tulsiconf b/mediapipe/MediaPipe.tulsiproj/project.tulsiconf
index 84480e106..d176c9778 100644
--- a/mediapipe/MediaPipe.tulsiproj/project.tulsiconf
+++ b/mediapipe/MediaPipe.tulsiproj/project.tulsiconf
@@ -2,7 +2,7 @@
"configDefaults" : {
"optionSet" : {
"BazelBuildOptionsDebug" : {
- "p" : "--config=debug --strip=never --features=oso_prefix_is_pwd --apple_generate_dsym"
+ "p" : "--strip=never --features=oso_prefix_is_pwd --apple_generate_dsym"
},
"CLANG_CXX_LANGUAGE_STANDARD" : {
"p" : "c++14"
@@ -24,11 +24,13 @@
"mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectiongpu",
"mediapipe/examples/ios/objectdetectiontrackinggpu",
+ "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera",
"mediapipe/examples/ios/posetrackinggpu",
"mediapipe/examples/ios/posetrackingsolutiongpu",
"mediapipe/examples/ios/selfiesegmentationgpu",
"mediapipe/objc",
- "mediapipe/objc/solutions/posetracking_gpu"
+ "mediapipe/objc/solutions/posetracking_gpu",
+ "mediapipe/swift/solutions/lindera"
],
"projectName" : "Mediapipe",
"workspaceRoot" : "../.."
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/AppDelegate.swift b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/AppDelegate.swift
new file mode 100644
index 000000000..abbea80fa
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/AppDelegate.swift
@@ -0,0 +1,22 @@
+//
+// AppDelegate.swift
+// PoseTrackingLindera
+//
+// Created by Mautisim Munir on 17/10/2022.
+//
+
+import UIKit
+
+@main
+class AppDelegate: UIResponder, UIApplicationDelegate {
+
+ var window: UIWindow?
+
+ func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
+ // Override point for customization after application launch.
+ return true
+ }
+
+
+}
+
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/AccentColor.colorset/Contents.json b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/AccentColor.colorset/Contents.json
new file mode 100644
index 000000000..eb8789700
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/AccentColor.colorset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "colors" : [
+ {
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/AppIcon.appiconset/Contents.json b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 000000000..5a3257a7d
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,93 @@
+{
+ "images" : [
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "60x60"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "60x60"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "76x76"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "83.5x83.5"
+ },
+ {
+ "idiom" : "ios-marketing",
+ "scale" : "1x",
+ "size" : "1024x1024"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/Contents.json b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/BUILD.bazel b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/BUILD.bazel
new file mode 100644
index 000000000..6ba6af02e
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/BUILD.bazel
@@ -0,0 +1,81 @@
+load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
+load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application")
+load("@build_bazel_rules_apple//apple:resources.bzl", "apple_resource_bundle")
+load(
+ "//mediapipe/examples/ios:bundle_id.bzl",
+ "BUNDLE_ID_PREFIX",
+ "example_provisioning",
+)
+
+MIN_IOS_VERSION = "14.0"
+
+swift_library(
+ name = "lindera_app_lib",
+ srcs = glob(["**/*.swift"]),
+ data =[
+ "Base.lproj/LaunchScreen.storyboard",
+ "Base.lproj/Main.storyboard",
+ ],
+ linkopts = [
+ "-lc++",
+ "-std=c++17",
+ "-lstdc++",
+ ],
+ module_name = "lindera_app_lib",
+ visibility = ["//visibility:public"],
+ deps = [
+ "@ios_opencv//:OpencvFramework",
+ ] + [
+ "//mediapipe/swift/solutions/lindera:lindera",
+ ],
+
+ # +
+ # [
+ # "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
+ # "//mediapipe/objc:mediapipe_framework_ios",
+ # "//mediapipe/objc:mediapipe_input_sources_ios",
+ # "//mediapipe/objc:mediapipe_layer_renderer",
+ # ] + select({
+ # "//mediapipe:ios_i386": [],
+ # "//mediapipe:ios_x86_64": [],
+ # "//conditions:default": [
+ # "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
+ # "//mediapipe/framework/formats:landmark_cc_proto",
+ # ],
+ # })
+)
+
+apple_resource_bundle(
+ name = "app_resources",
+ bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
+ resources = [
+ "Assets.xcassets",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+ios_application(
+ name = "posetracking-lindera",
+ bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
+ families = [
+ "iphone",
+ "ipad",
+ ],
+ infoplists = [
+ "Info.plist",
+ "//mediapipe/examples/ios/common:Info.plist",
+ ],
+ linkopts = [
+ "-lc++",
+ ],
+ minimum_os_version = MIN_IOS_VERSION,
+ provisioning_profile = example_provisioning(),
+ resources = [":app_resources"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":lindera_app_lib",
+ ":app_resources",
+
+ # "@ios_opencv//:OpencvFramework",
+ ],
+)
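+
+# Build sketch (flags assumed from the usual MediaPipe iOS setup; provisioning must be configured first):
+#   bazel build --config=ios_arm64 //mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera:posetracking-lindera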
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/LaunchScreen.storyboard b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/LaunchScreen.storyboard
new file mode 100644
index 000000000..2b4910a52
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/LaunchScreen.storyboard
@@ -0,0 +1,32 @@
+<!-- LaunchScreen storyboard XML (Interface Builder content not captured in this extract) -->
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard
new file mode 100644
index 000000000..2ca04eade
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard
@@ -0,0 +1,84 @@
+<!-- Main storyboard XML: scenes wiring ViewController's liveView, landmarks/model buttons, title view and FPS label (Interface Builder content not captured in this extract) -->
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Info.plist b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Info.plist
new file mode 100644
index 000000000..426e53640
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Info.plist
@@ -0,0 +1,35 @@
+
+
+
+
+ UIApplicationSceneManifest
+
+ NSCameraUsageDescription
+ This app uses the camera to demonstrate live video processing.
+ CFBundleDevelopmentRegion
+ en
+ CFBundleExecutable
+ $(EXECUTABLE_NAME)
+ CFBundleIdentifier
+ $(PRODUCT_BUNDLE_IDENTIFIER)
+ CFBundleInfoDictionaryVersion
+ 6.0
+ CFBundleName
+ $(PRODUCT_NAME)
+ CFBundlePackageType
+ APPL
+ CFBundleShortVersionString
+ 1.0
+ CFBundleVersion
+ 1
+ LSRequiresIPhoneOS
+
+ UIApplicationSupportsMultipleScenes
+
+ MainViewController
+ ViewController
+ UILaunchStoryboardName
+ LaunchScreen
+
+
+
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift
new file mode 100644
index 000000000..13ec1ee48
--- /dev/null
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift
@@ -0,0 +1,164 @@
+//
+// ViewController.swift
+// PoseTrackingLindera
+//
+// Created by Mautisim Munir on 17/10/2022.
+//
+import UIKit
+import LinderaDetection
+
+
+class ViewController: UIViewController {
+
+ //MARK: - UI Elements
+
+
+ @IBOutlet var liveView : UIView!
+ @IBOutlet var showLandmarksButton: UIButton!
+ @IBOutlet var chooseModelButton: UIButton!
+ @IBOutlet var titleview: UIView!
+ @IBOutlet var fpsLabel: UILabel!
+
+
+ //MARK: - UI Actions
+
+ @IBAction func setModelComplexity(){
+ let alert = UIAlertController(
+ title: nil,
+ message: nil,
+ preferredStyle: .actionSheet
+ )
+
+ alert.addAction(
+ .init(title: "MODEL (LITE)", style: .default) {[weak self] _ in
+ self?.lindera.setModelComplexityNow(complexity: 0)
+ self?.updateModelButtonText()
+
+ }
+ )
+
+ alert.addAction(
+ .init(title: "MODEL (FULL)", style: .default) { [weak self] _ in
+ self?.lindera.setModelComplexityNow(complexity: 1)
+ self?.updateModelButtonText()
+
+
+ }
+ )
+ alert.addAction(
+ .init(title: "MODEL (HEAVY)", style: .default) { [weak self] _ in
+ self?.lindera.setModelComplexityNow(complexity: 2)
+ self?.updateModelButtonText()
+
+
+ }
+ )
+
+ present(alert, animated: true)
+ }
+
+ @IBAction func showLandmarksButtonTouch(sender: UIButton){
+
+ lindera.showLandmarks(value: !lindera.areLandmarksShown());
+ updateLandmarksButtonText()
+
+ }
+
+ // MARK: - LinderaDelegate
+
+    /// A simple LinderaDelegate implementation; the nose-coordinate print below is left commented out because per-frame logging causes large drops in FPS
+ class LinderaDelegateImpl:LinderaDelegate{
+ func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
+ // if let kpt = event.pose.nose{
+ // // Printing causes large drops in FPS
+ // print("LinderaDelegateImpl: Nose Keypoint (\(String(describing: kpt.position.x)),\(String(describing: kpt.position.y)),\(kpt.position.z)) with confidence \(kpt.confidence)")
+ // }
+ }
+
+
+ }
+ // MARK: - UI Text Modifications
+ func updateLandmarksButtonText(){
+ if (lindera.areLandmarksShown()){
+ showLandmarksButton.setTitle("LANDMARKS (ON)", for: UIControl.State.normal)
+ }else{
+ showLandmarksButton.setTitle("LANDMARKS (OFF)", for: UIControl.State.normal)
+ }
+
+ }
+
+ func updateModelButtonText(){
+ var text = "MODEL "
+ switch(lindera.getModelComplexity()){
+
+ case 0:
+ text += "(LITE)"
+ break;
+ case 1:
+ text += "(FULL)"
+ break;
+ case 2:
+ text += "(HEAVY)"
+ break;
+
+ default:
+ text += "(Unknown)"
+ }
+ chooseModelButton.setTitle(text, for: UIControl.State.normal)
+ }
+
+
+
+ // MARK: - State Objects
+
+ let lindera = Lindera()
+
+ let linderaDelegate = LinderaDelegateImpl()
+
+ // MARK: - UI Setup
+ override func viewDidLoad() {
+ super.viewDidLoad()
+
+ self.lindera.delegate = linderaDelegate
+
+
+ if let view = self.liveView{
+ // add lindera camera view to our app's UIView i.e. liveView
+ view.addSubview(lindera.cameraView)
+ // Expand our cameraView frame to liveView frame
+ self.lindera.cameraView.frame = view.bounds
+
+            // Setting up constraints (not necessary given the frame assignment above)
+ self.lindera.cameraView.translatesAutoresizingMaskIntoConstraints = false
+ NSLayoutConstraint.activate([
+ self.lindera.cameraView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
+ self.lindera.cameraView.topAnchor.constraint(equalTo: view.topAnchor),
+ self.lindera.cameraView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
+ self.lindera.cameraView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
+ ])
+ }
+
+        // This closure is called whenever there is an FPS update
+ self.lindera.setFpsDelegate(fpsDelegate: {[weak self] fps in
+ DispatchQueue.main.async {
+ self?.fpsLabel.text = "\(Int(fps)) fps"
+ }
+
+ })
+
+        // Bring the title view and FPS label to the front; otherwise they are hidden behind the camera view
+ self.liveView.bringSubviewToFront(titleview)
+ self.liveView.bringSubviewToFront(fpsLabel)
+
+        // Make the Landmarks and Model buttons' text reflect the state of the lindera object
+ updateLandmarksButtonText()
+ updateModelButtonText()
+
+ lindera.startCamera()
+
+
+ }
+
+
+}
+
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/AccentColor.colorset/Contents.json b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/AccentColor.colorset/Contents.json
new file mode 100644
index 000000000..eb8789700
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/AccentColor.colorset/Contents.json
@@ -0,0 +1,11 @@
+{
+ "colors" : [
+ {
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 000000000..9221b9bb1
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,98 @@
+{
+ "images" : [
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "2x",
+ "size" : "60x60"
+ },
+ {
+ "idiom" : "iphone",
+ "scale" : "3x",
+ "size" : "60x60"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "20x20"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "29x29"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "40x40"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "1x",
+ "size" : "76x76"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "76x76"
+ },
+ {
+ "idiom" : "ipad",
+ "scale" : "2x",
+ "size" : "83.5x83.5"
+ },
+ {
+ "idiom" : "ios-marketing",
+ "scale" : "1x",
+ "size" : "1024x1024"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/Contents.json b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/man-selfie.imageset/Contents.json b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/man-selfie.imageset/Contents.json
new file mode 100644
index 000000000..ce366465e
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/man-selfie.imageset/Contents.json
@@ -0,0 +1,21 @@
+{
+ "images" : [
+ {
+ "filename" : "man-selfie.jpg",
+ "idiom" : "universal",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "3x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/man-selfie.imageset/man-selfie.jpg b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/man-selfie.imageset/man-selfie.jpg
new file mode 100644
index 000000000..636d9af96
Binary files /dev/null and b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/man-selfie.imageset/man-selfie.jpg differ
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/test-people.imageset/Contents.json b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/test-people.imageset/Contents.json
new file mode 100644
index 000000000..a7dafdd0c
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/test-people.imageset/Contents.json
@@ -0,0 +1,21 @@
+{
+ "images" : [
+ {
+ "filename" : "test-people.jpg",
+ "idiom" : "universal",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "3x"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/test-people.imageset/test-people.jpg b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/test-people.imageset/test-people.jpg
new file mode 100644
index 000000000..a583cb26b
Binary files /dev/null and b/mediapipe/examples/ios/posetrackingsolution-swiftui/Assets.xcassets/test-people.imageset/test-people.jpg differ
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/BUILD.bazel b/mediapipe/examples/ios/posetrackingsolution-swiftui/BUILD.bazel
new file mode 100644
index 000000000..842acb3f6
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/BUILD.bazel
@@ -0,0 +1,73 @@
+load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
+load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application")
+load("@build_bazel_rules_apple//apple:resources.bzl", "apple_resource_bundle")
+load(
+ "//mediapipe/examples/ios:bundle_id.bzl",
+ "BUNDLE_ID_PREFIX",
+ "example_provisioning",
+)
+
+MIN_IOS_VERSION = "14.0"
+
+swift_library(
+ name = "app_lib",
+ srcs = glob(["**/*.swift"]),
+ data = [
+ "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
+ "//mediapipe/modules/pose_detection:pose_detection.tflite",
+ "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
+ ],
+ linkopts = [
+ "-lc++",
+ "-std=c++17",
+ "-lstdc++",
+ ],
+ module_name = "app_lib",
+ visibility = ["//visibility:public"],
+ deps = [
+ "@ios_opencv//:OpencvFramework",
+ ] + [
+ "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
+ "//mediapipe/objc:mediapipe_framework_ios",
+ "//mediapipe/objc:mediapipe_input_sources_ios",
+ "//mediapipe/objc:mediapipe_layer_renderer",
+ ] + select({
+ "//mediapipe:ios_i386": [],
+ "//mediapipe:ios_x86_64": [],
+ "//conditions:default": [
+ "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
+ "//mediapipe/framework/formats:landmark_cc_proto",
+ ],
+ }),
+)
+
+apple_resource_bundle(
+ name = "app_resources",
+ bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
+ resources = [
+ "Assets.xcassets",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+ios_application(
+ name = "posetracking-solution-swift",
+ bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
+ families = [
+ "iphone",
+ "ipad",
+ ],
+ infoplists = ["Info.plist"],
+ linkopts = [
+ "-lc++",
+ ],
+ minimum_os_version = MIN_IOS_VERSION,
+ provisioning_profile = example_provisioning(),
+ resources = [":app_resources"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":app_lib",
+ ":app_resources",
+ # "@ios_opencv//:OpencvFramework",
+ ],
+)
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/CameraError.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/CameraError.swift
new file mode 100644
index 000000000..417d931b5
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/CameraError.swift
@@ -0,0 +1,32 @@
+import Foundation
+
+enum CameraError: Error {
+ case cameraUnavailable
+ case cannotAddInput
+ case cannotAddOutput
+ case createCaptureInput(Error)
+ case deniedAuthorization
+ case restrictedAuthorization
+ case unknownAuthorization
+}
+
+extension CameraError: LocalizedError {
+ var errorDescription: String? {
+ switch self {
+ case .cameraUnavailable:
+ return "Camera unavailable"
+ case .cannotAddInput:
+ return "Cannot add capture input to session"
+ case .cannotAddOutput:
+ return "Cannot add video output to session"
+ case .createCaptureInput(let error):
+ return "Creating capture input for camera: \(error.localizedDescription)"
+ case .deniedAuthorization:
+ return "Camera access denied"
+ case .restrictedAuthorization:
+ return "Attempting to access a restricted capture device"
+ case .unknownAuthorization:
+ return "Unknown authorization status for capture device"
+ }
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/CameraManager.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/CameraManager.swift
new file mode 100644
index 000000000..812ffa0db
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/CameraManager.swift
@@ -0,0 +1,142 @@
+import AVFoundation
+// Owns the shared AVCaptureSession and publishes camera errors for SwiftUI views.
+class CameraManager: ObservableObject {
+
+    // Last camera error, published so views can surface it
+    @Published var error: CameraError?
+    // Capture session shared with the frame pipeline
+    let session = AVCaptureSession()
+    // Serial queue on which all session configuration work happens
+    private let sessionQueue = DispatchQueue(label: "com.raywenderlich.SessionQ")
+    // Output that delivers video sample buffers to a delegate
+    private let videoOutput = AVCaptureVideoDataOutput()
+    // Whether the session is unconfigured, configured, unauthorized, or failed
+    private var status = Status.unconfigured
+
+
+ // 2
+ enum Status {
+ case unconfigured
+ case configured
+ case unauthorized
+ case failed
+ }
+ // 3
+ static let shared = CameraManager()
+ // 4
+ private init() {
+ configure()
+ }
+ // 5
+ private func configure() {
+ checkPermissions()
+ sessionQueue.async {
+ self.configureCaptureSession()
+ self.session.startRunning()
+ }
+
+ }
+ func set(
+ _ delegate: AVCaptureVideoDataOutputSampleBufferDelegate,
+ queue: DispatchQueue
+ ) {
+ sessionQueue.async {
+ self.videoOutput.setSampleBufferDelegate(delegate, queue: queue)
+ }
+ }
+
+ private func set(error: CameraError?) {
+ DispatchQueue.main.async {
+ self.error = error
+ }
+ }
+ private func checkPermissions() {
+ // 1
+ switch AVCaptureDevice.authorizationStatus(for: .video) {
+ case .notDetermined:
+ // 2
+ sessionQueue.suspend()
+ AVCaptureDevice.requestAccess(for: .video) { authorized in
+ // 3
+ if !authorized {
+ self.status = .unauthorized
+ self.set(error: .deniedAuthorization)
+ }
+ self.sessionQueue.resume()
+ }
+ // 4
+ case .restricted:
+ status = .unauthorized
+ set(error: .restrictedAuthorization)
+ case .denied:
+ status = .unauthorized
+ set(error: .deniedAuthorization)
+ // 5
+ case .authorized:
+ break
+ // 6
+ @unknown default:
+ status = .unauthorized
+ set(error: .unknownAuthorization)
+ }
+ }
+ private func configureCaptureSession() {
+ guard status == .unconfigured else {
+ return
+ }
+ session.beginConfiguration()
+ defer {
+ session.commitConfiguration()
+ }
+ let device = AVCaptureDevice.default(
+ .builtInWideAngleCamera,
+ for: .video,
+ position: .front)
+ guard let camera = device else {
+ set(error: .cameraUnavailable)
+ status = .failed
+ return
+ }
+
+ do {
+ // 1
+ let cameraInput = try AVCaptureDeviceInput(device: camera)
+ // 2
+ if session.canAddInput(cameraInput) {
+ session.addInput(cameraInput)
+ } else {
+ // 3
+ set(error: .cannotAddInput)
+ status = .failed
+ return
+ }
+ } catch {
+ // 4
+ set(error: .createCaptureInput(error))
+ status = .failed
+ return
+ }
+ // 1
+ if session.canAddOutput(videoOutput) {
+ session.addOutput(videoOutput)
+ // 2
+ videoOutput.videoSettings =
+ [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
+ // 3
+ let videoConnection = videoOutput.connection(with: .video)
+ videoConnection?.videoOrientation = .portrait
+ } else {
+ // 4
+ set(error: .cannotAddOutput)
+ status = .failed
+ return
+ }
+
+
+
+
+ }
+
+
+
+}
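+
+// Usage sketch: a sample-buffer delegate registers itself once via CameraManager.shared.set(_:queue:)
+// and then receives frames from the shared session (FrameManager.swift below does exactly this).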
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/FrameManager.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/FrameManager.swift
new file mode 100644
index 000000000..d445372fe
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Camera/FrameManager.swift
@@ -0,0 +1,33 @@
+import AVFoundation
+// Receives video sample buffers from CameraManager and publishes the latest frame.
+class FrameManager: NSObject, ObservableObject {
+    // Singleton so the camera delegate is registered exactly once
+    static let shared = FrameManager()
+    // Most recent camera frame, published for SwiftUI consumers
+    @Published var current: CVPixelBuffer?
+    // Dedicated queue on which sample buffers are delivered
+    let videoOutputQueue = DispatchQueue(
+        label: "com.raywenderlich.VideoOutputQ",
+        qos: .userInitiated,
+        attributes: [],
+        autoreleaseFrequency: .workItem)
+    // Register as the camera's sample-buffer delegate on creation
+    private override init() {
+ super.init()
+ CameraManager.shared.set(self, queue: videoOutputQueue)
+ }
+}
+
+extension FrameManager: AVCaptureVideoDataOutputSampleBufferDelegate {
+ func captureOutput(
+ _ output: AVCaptureOutput,
+ didOutput sampleBuffer: CMSampleBuffer,
+ from connection: AVCaptureConnection
+ ) {
+ if let buffer = sampleBuffer.imageBuffer {
+ DispatchQueue.main.async {
+ self.current = buffer
+ }
+ }
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/ContentView.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/ContentView.swift
new file mode 100644
index 000000000..44c29ba14
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/ContentView.swift
@@ -0,0 +1,35 @@
+import SwiftUI
+import MPPoseTracking
+
+
+
+
+
+struct ContentView: View {
+ @StateObject private var model = ContentViewModel()
+
+    let poseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true, modelComplexity: 1))  // modelComplexity: 0 = lite, 1 = full, 2 = heavy
+
+
+
+    init() {
+        // FIXME: `self.body` has no `layer`, and a CALayer cannot be assigned to a CGRect frame;
+        // the renderer layer would need to be hosted in a UIView (e.g. via UIViewRepresentable).
+        // poseTracking?.renderer.layer.frame = self.body.layer
+    }
+
+ var body: some View {
+ VStack{
+ FrameView(image: model.frame)
+ .edgesIgnoringSafeArea(.all)
+// buildInferenceView()
+ }
+
+
+ }
+}
+
+struct ContentView_Previews: PreviewProvider {
+ static var previews: some View {
+ ContentView()
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Info.plist b/mediapipe/examples/ios/posetrackingsolution-swiftui/Info.plist
new file mode 100644
index 000000000..450f30af8
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Info.plist
@@ -0,0 +1,47 @@
+
+
+
+
+ NSPhotoLibraryUsageDescription
+ We need to access your library
+ CFBundleDevelopmentRegion
+ $(DEVELOPMENT_LANGUAGE)
+ CFBundleExecutable
+ $(EXECUTABLE_NAME)
+ CFBundleIdentifier
+ $(PRODUCT_BUNDLE_IDENTIFIER)
+ CFBundleInfoDictionaryVersion
+ 6.0
+ CFBundleName
+ $(PRODUCT_NAME)
+ CFBundlePackageType
+ APPL
+ CFBundleShortVersionString
+ 1.0
+ CFBundleVersion
+ 1
+ LSRequiresIPhoneOS
+
+ NSCameraUsageDescription
+ We need to access your Camera
+ UIApplicationSupportsIndirectInputEvents
+
+ UIRequiredDeviceCapabilities
+
+ armv7
+
+ UISupportedInterfaceOrientations
+
+ UIInterfaceOrientationPortrait
+ UIInterfaceOrientationLandscapeLeft
+ UIInterfaceOrientationLandscapeRight
+
+ UISupportedInterfaceOrientations~ipad
+
+ UIInterfaceOrientationPortrait
+ UIInterfaceOrientationPortraitUpsideDown
+ UIInterfaceOrientationLandscapeLeft
+ UIInterfaceOrientationLandscapeRight
+
+
+
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/PoseTrackingSolutionSwiftApp.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/PoseTrackingSolutionSwiftApp.swift
new file mode 100644
index 000000000..a218f5c3a
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/PoseTrackingSolutionSwiftApp.swift
@@ -0,0 +1,17 @@
+//
+// ModelsLabTestApp.swift
+// ModelsLabTest
+//
+// Created by Mautisim Munir on 02/06/2022.
+//
+
+import SwiftUI
+
+@main
+struct ModelsLabTestApp: App {
+ var body: some Scene {
+ WindowGroup {
+ ContentView()
+ }
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Preview Content/Preview Assets.xcassets/Contents.json b/mediapipe/examples/ios/posetrackingsolution-swiftui/Preview Content/Preview Assets.xcassets/Contents.json
new file mode 100644
index 000000000..73c00596a
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Preview Content/Preview Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/ViewModels/ContentViewModel.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/ViewModels/ContentViewModel.swift
new file mode 100644
index 000000000..1b8ee9380
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/ViewModels/ContentViewModel.swift
@@ -0,0 +1,60 @@
+//
+// ContentViewModel.swift
+// ModelsLabTest
+//
+// Created by Mautisim Munir on 12/06/2022.
+//
+
+import Foundation
+import CoreImage
+import UIKit
+import SwiftUI
+import MPPoseTracking
+
+
+
+
+class ContentViewModel: ObservableObject {
+ // 1
+ @Published var frame: CGImage?
+ // 2
+ private let frameManager = FrameManager.shared
+ var counter = 0
+
+
+
+
+// let modelPath = Bundle.main.path(forResource: "model", ofType: "edgem")!
+
+// let model:EdgeModel
+
+ init() {
+// model = EdgeModel(modelPath: modelPath)
+ setupSubscriptions()
+ }
+    // Subscribe to camera frames and convert them to CGImages for the view
+    func setupSubscriptions() {
+        frameManager.$current
+            .receive(on: RunLoop.main)
+            .compactMap { buffer -> CGImage? in
+                // Wrap the pixel buffer in a CIImage and render it to a CGImage for SwiftUI
+                guard let buffer = buffer else { return nil }
+                let ciContext = CIContext()
+                let ciImage = CIImage(cvImageBuffer: buffer)
+                return ciContext.createCGImage(ciImage, from: ciImage.extent)
+            }
+
+ }
+}
diff --git a/mediapipe/examples/ios/posetrackingsolution-swiftui/Views/FrameView.swift b/mediapipe/examples/ios/posetrackingsolution-swiftui/Views/FrameView.swift
new file mode 100644
index 000000000..17f08a0c2
--- /dev/null
+++ b/mediapipe/examples/ios/posetrackingsolution-swiftui/Views/FrameView.swift
@@ -0,0 +1,41 @@
+//
+// FrameView.swift
+// ModelsLabTest
+//
+// Created by Mautisim Munir on 12/06/2022.
+//
+
+import SwiftUI
+
+struct FrameView: View {
+ var image: CGImage?
+ private let label = Text("Camera feed")
+ var body: some View {
+ // 1
+ if let image = image {
+ // 2
+ GeometryReader { geometry in
+ // 3
+ Image(image, scale: 1.0, orientation: .upMirrored, label: label)
+ .resizable()
+// .scaledToFit()
+ .scaledToFill()
+ .frame(
+ width: geometry.size.width,
+ height: geometry.size.height,
+ alignment: .center)
+ .clipped()
+ }
+ } else {
+ // 4
+ Color.black
+ }
+
+ }
+}
+
+struct FrameView_Previews: PreviewProvider {
+ static var previews: some View {
+ FrameView()
+ }
+}
diff --git a/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt b/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt
index 9cfe3fd29..ba429e09f 100644
--- a/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt
+++ b/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt
@@ -44,6 +44,8 @@ node {
node {
calculator: "PoseLandmarkGpu"
input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
+ input_side_packet: "MODEL_COMPLEXITY:model_complexity"
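+  # Graph-level side packet that selects the pose_landmark model (0 = lite, 1 = full, 2 = heavy);
+  # the host app sets it before the graph starts (see PoseTracking.mm).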
+
input_stream: "IMAGE:throttled_input_video"
output_stream: "LANDMARKS:pose_landmarks"
output_stream: "SEGMENTATION_MASK:segmentation_mask"
diff --git a/mediapipe/objc/solutions/posetracking_gpu/BUILD b/mediapipe/objc/solutions/posetracking_gpu/BUILD
index 4f2841cb5..d25e498d6 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/BUILD
+++ b/mediapipe/objc/solutions/posetracking_gpu/BUILD
@@ -4,13 +4,21 @@ objc_library(
"*.h",
"*.mm",
]),
+ module_name = "MPPoseTracking",
hdrs = [
"PoseTracking.h",
"PoseTrackingOptions.h",
+ "PoseTrackingResults.h"
],
copts = [
"-Wno-shorten-64-to-32",
],
+    data = [
+        "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
+        "//mediapipe/modules/pose_detection:pose_detection.tflite",
+        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
+        "//mediapipe/modules/pose_landmark:pose_landmark_heavy.tflite",
+        "//mediapipe/modules/pose_landmark:pose_landmark_lite.tflite",
+    ],
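+    # All three pose_landmark models are bundled so the "model_complexity" side packet
+    # (0 = lite, 1 = full, 2 = heavy) can switch between them at runtime.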
sdk_frameworks = ["Accelerate"],
visibility = ["//visibility:public"],
deps = [
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h
index 1953e1530..65f1b392c 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h
@@ -6,26 +6,23 @@
#define MEDIAPIPE_POSETRACKING_H
#import
#import "mediapipe/objc/MPPCameraInputSource.h"
-#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#import "mediapipe/objc/MPPPlayerInputSource.h"
-#import "mediapipe/objc/MPPTimestampConverter.h"
#import "PoseTrackingOptions.h"
#import "PoseTrackingResults.h"
-@interface PoseTracking : NSObject
+@interface PoseTracking : NSObject <MPPInputSourceDelegate>
// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
// viewWillAppear: and sent video frames on videoQueue.
-@property(nonatomic) MPPGraph* mediapipeGraph;
+//@property(nonatomic) MPPGraph* mediapipeGraph;
+
-// Helps to convert timestamp.
-@property(nonatomic) MPPTimestampConverter* timestampConverter;
// Render frames in a layer.
@property(nonatomic) MPPLayerRenderer* renderer;
-
+@property (nonatomic) CMTime timeStamp;
// Graph name.
@property(nonatomic) NSString* graphName;
@@ -45,8 +42,15 @@
// Codeblock that runs whenever pose tracking results are available
@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
+// Codeblock that runs whenever output is available
+@property(nonatomic) void(^graphOutputStreamListener)();
+
- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions;
+- (void)startGraph;
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
+- (void)showLandmarks: (BOOL) value;
+- (BOOL) areLandmarksShown;
+- (void) stopGraph;
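+
+// Typical lifecycle (a sketch based on PoseTracking.mm and the Lindera Swift wrapper):
+//   1. initWithPoseTrackingOptions: loads the graph and creates the renderer.
+//   2. startWithCamera: registers this object as the camera delegate and starts the graph.
+//   3. showLandmarks: / areLandmarksShown toggle and report which output stream is rendered.
+//   4. stopGraph closes the input streams and waits for the graph to finish.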
@end
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm
index d58c4e394..4ea760cd6 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm
@@ -1,118 +1,56 @@
#include "PoseTracking.h"
#include "mediapipe/framework/formats/landmark.pb.h"
+#import "mediapipe/objc/MPPGraph.h"
+#import "mediapipe/objc/MPPTimestampConverter.h"
+#include "mediapipe/framework/packet.h"
+#include <cstring>  // strcmp, used to compare output stream names below
static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
static const char* kLandmarksOutputStream = "pose_landmarks";
-@implementation PoseTracking
-#pragma mark - MediaPipe graph methods
-+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
- // Load the graph config resource.
- NSError* configLoadError = nil;
- NSBundle* bundle = [NSBundle bundleForClass:[self class]];
- if (!resource || resource.length == 0) {
- return nil;
- }
- NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
- NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
- if (!data) {
- NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
- return nil;
- }
+# pragma mark - PoseTrackingGraphDelegate Interface
+@interface PoseTrackingGraphDelegate : NSObject <MPPGraphDelegate>
+// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
+@property (nonatomic) MPPGraph* mediapipeGraph;
+@property (nonatomic) const char* graphOutputStream;
+@property (nonatomic) MPPLayerRenderer* renderer;
+@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
+@property(nonatomic) void(^graphOutputStreamListener)();
- // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
- mediapipe::CalculatorGraphConfig config;
- config.ParseFromArray(data.bytes, data.length);
- // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
- MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
- return newGraph;
-}
+-(id) initWithMediapipeGraph: (MPPGraph*) graph graphOutputStream: (const char*) graphOutputStream
+ renderer: (MPPLayerRenderer*) renderer;
+- (void)mediapipeGraph:(MPPGraph*)graph
+ didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
+ fromStream:(const std::string&)streamName ;
+- (void)mediapipeGraph:(MPPGraph*)graph
+ didOutputPacket:(const ::mediapipe::Packet&)packet
+ fromStream:(const std::string&)streamName ;
-- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions{
- self.renderer = [[MPPLayerRenderer alloc] init];
- self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
+@end
- self.timestampConverter = [[MPPTimestampConverter alloc] init];
-
- dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
- DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
- self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);
-
- self.poseTrackingOptions = poseTrackingOptions;
- self.graphName = @"pose_tracking_gpu";
- self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
- self.graphInputStream = "input_video";
-
-
- if (poseTrackingOptions.showLandmarks){
- self.graphOutputStream = "output_video";
- }else{
- self.graphOutputStream = "throttled_input_video";
- }
-
- [self.mediapipeGraph addFrameOutputStream:self.graphOutputStream
- outputPacketType:MPPPacketTypePixelBuffer];
-
-
-
- [self.mediapipeGraph addFrameOutputStream:"pose_landmarks"
- outputPacketType:MPPPacketTypeRaw];
-
- self.mediapipeGraph.delegate = self;
-
- self.poseTrackingResultsListener = ^(PoseTrackingResults*){};
-
+# pragma mark - PoseTrackingGraphDelegate Implementation
+
+@implementation PoseTrackingGraphDelegate
+
+-(id) initWithMediapipeGraph: (MPPGraph*) graph graphOutputStream: (const char*) graphOutputStream
+ renderer: (MPPLayerRenderer*) renderer
+{
+    self.mediapipeGraph = graph;
+    self.graphOutputStream = graphOutputStream;
+    self.renderer = renderer;
return self;
}
-
-
-- (void)startGraph {
- // Start running self.mediapipeGraph.
- NSError* error;
- if (![self.mediapipeGraph startWithError:&error]) {
- NSLog(@"Failed to start graph: %@", error);
- }
- else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
- NSLog(@"Failed to complete graph initial run: %@", error);
- }
-}
-
-- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
- [cameraSource setDelegate:self queue:self.videoQueue];
-
- [self startGraph];
- // Start fetching frames from the camera.
- dispatch_async(self.videoQueue, ^{
- [cameraSource start];
- });
-}
-
-
-#pragma mark - MPPInputSourceDelegate methods
-
-// Must be invoked on self.videoQueue.
-- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
- timestamp:(CMTime)timestamp
- fromSource:(MPPInputSource*)source {
-
- [self.mediapipeGraph sendPixelBuffer:imageBuffer
- intoStream:self.graphInputStream
- packetType:MPPPacketTypePixelBuffer
- timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
-}
-
-#pragma mark - MPPGraphDelegate methods
-
// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
fromStream:(const std::string&)streamName {
if (streamName == self.graphOutputStream) {
+ self.graphOutputStreamListener();
+
// Display the captured image on the screen.
CVPixelBufferRetain(pixelBuffer);
dispatch_async(dispatch_get_main_queue(), ^{
@@ -146,4 +84,170 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
}
}
+
+@end
+
+
+@interface PoseTracking(){
+ // The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
+ // viewWillAppear: and sent video frames on videoQueue.
+ MPPGraph* mediapipeGraph;
+ PoseTrackingGraphDelegate* poseTrackingGraphDelegate;
+ //// Helps to convert timestamp.
+ MPPTimestampConverter* timestampConverter;
+}
+
+@end
+
+@implementation PoseTracking
+
+#pragma mark - MediaPipe graph methods
+
++ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
+ // Load the graph config resource.
+ NSError* configLoadError = nil;
+ NSBundle* bundle = [NSBundle bundleForClass:[self class]];
+ if (!resource || resource.length == 0) {
+ return nil;
+ }
+ NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
+ NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
+ if (!data) {
+ NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
+ return nil;
+ }
+
+ // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
+ mediapipe::CalculatorGraphConfig config;
+ config.ParseFromArray(data.bytes, data.length);
+
+ // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
+ MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
+ return newGraph;
+}
+
+- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions{
+ self.renderer = [[MPPLayerRenderer alloc] init];
+ self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
+
+ self->timestampConverter = [[MPPTimestampConverter alloc] init];
+
+ dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
+ DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
+ self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);
+
+ self.poseTrackingOptions = poseTrackingOptions;
+ self.graphName = @"pose_tracking_gpu";
+ self->mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
+ self.graphInputStream = "input_video";
+
+
+ if (poseTrackingOptions.showLandmarks){
+ self.graphOutputStream = "output_video";
+ }else{
+ self.graphOutputStream = "throttled_input_video";
+ }
+
+ [self->mediapipeGraph addFrameOutputStream:"output_video"
+ outputPacketType:MPPPacketTypePixelBuffer];
+ [self->mediapipeGraph addFrameOutputStream:"throttled_input_video"
+ outputPacketType:MPPPacketTypePixelBuffer];
+
+
+ self.poseTrackingResultsListener = ^(PoseTrackingResults*){};
+
+
+ [self->mediapipeGraph addFrameOutputStream:"pose_landmarks"
+ outputPacketType:MPPPacketTypeRaw];
+    self->poseTrackingGraphDelegate = [[PoseTrackingGraphDelegate alloc] initWithMediapipeGraph:self->mediapipeGraph graphOutputStream:self.graphOutputStream renderer:self.renderer];
+    // Capture self weakly to avoid a retain cycle between this object and the blocks below.
+    __weak PoseTracking* weakSelf = self;
+    self->poseTrackingGraphDelegate.poseTrackingResultsListener = ^(PoseTrackingResults* results){
+        weakSelf.poseTrackingResultsListener(results);
+    };
+
+    self->poseTrackingGraphDelegate.graphOutputStreamListener = ^(){
+        if (weakSelf.graphOutputStream != nil)
+            weakSelf.graphOutputStreamListener();
+    };
+
+
+
+
+ self->mediapipeGraph.delegate = self->poseTrackingGraphDelegate;
+
+
+
+ return self;
+}
+
+- (void)showLandmarks: (BOOL) value{
+ if (value){
+ self->poseTrackingGraphDelegate.graphOutputStream = "output_video";
+ }else{
+ self->poseTrackingGraphDelegate.graphOutputStream = "throttled_input_video";
+ }
+
+}
+
+- (BOOL) areLandmarksShown{
+    // Compare the stream names by content; identical literals are not guaranteed to share storage.
+    return strcmp(self->poseTrackingGraphDelegate.graphOutputStream, "output_video") == 0;
+}
+
+- (void)startGraph {
+ // Start running self.mediapipeGraph.
+    [self->mediapipeGraph setSidePacket:mediapipe::MakePacket<int>(self.poseTrackingOptions.modelComplexity) named:"model_complexity"];
+ NSError* error;
+ if (![self->mediapipeGraph startWithError:&error]) {
+ NSLog(@"Failed to start graph: %@", error);
+ }
+ else if (![self->mediapipeGraph waitUntilIdleWithError:&error]) {
+ NSLog(@"Failed to complete graph initial run: %@", error);
+ }
+}
+
+- (void) stopGraph {
+ [self->mediapipeGraph cancel];
+ NSError* error;
+ if ([self->mediapipeGraph closeAllInputStreamsWithError: &error]){
+ if (![self->mediapipeGraph waitUntilDoneWithError:&error]){
+ NSLog(@"Failed to stop graph: %@", error);
+
+ }
+ }else {
+ NSLog(@"Failed to close input streams: %@", error);
+
+ }
+
+}
+- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
+ [cameraSource setDelegate:self queue:self.videoQueue];
+
+ [self startGraph];
+ // Start fetching frames from the camera.
+ dispatch_async(self.videoQueue, ^{
+ [cameraSource start];
+ });
+}
+
+
+#pragma mark - MPPInputSourceDelegate methods
+
+// Must be invoked on self.videoQueue.
+- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
+ timestamp:(CMTime)timestamp
+ fromSource:(MPPInputSource*)source {
+
+ self.timeStamp = timestamp;
+
+ [self->mediapipeGraph sendPixelBuffer:imageBuffer
+ intoStream:self.graphInputStream
+ packetType:MPPPacketTypePixelBuffer
+ timestamp:[self->timestampConverter timestampForMediaTime:timestamp]];
+}
+
+#pragma mark - MPPGraphDelegate methods
+
+
@end
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h
index 40bbff0ae..c7482dc33 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.h
@@ -5,12 +5,16 @@
#ifndef MEDIAPIPE_POSETRACKINGOPTIONS_H
#define MEDIAPIPE_POSETRACKINGOPTIONS_H
#import
-@interface PoseTrackingOptions: NSObject
+
+
+@interface PoseTrackingOptions: NSObject
+@property(nonatomic) int modelComplexity;
@property(nonatomic) bool showLandmarks;
//@property(nonatomic) int cameraRotation;
-- (instancetype) initWithShowLandmarks : (bool) showLandmarks;
+
+- (instancetype) initWithShowLandmarks : (bool) showLandmarks modelComplexity: (int) modelComplexity;
@end
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.mm b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.mm
index f9c8911c3..6b52ce52b 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.mm
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingOptions.mm
@@ -2,9 +2,10 @@
@implementation PoseTrackingOptions
-- (instancetype) initWithShowLandmarks : (bool) showLandmarks {
+- (instancetype) initWithShowLandmarks : (bool) showLandmarks modelComplexity: (int) modelComplexity{
// self.cameraRotation = cameraRotation;
self.showLandmarks = showLandmarks;
+ self.modelComplexity = modelComplexity;
return self;
}
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h
index c4481121d..cef39dabb 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.h
@@ -2,6 +2,44 @@
#define MEDIAPIPE_POSETRACKINGRESULTS_H
#import
+
+static const NSInteger POSE_NOSE = 0;
+static const NSInteger POSE_LEFT_EYE_INNER = 1;
+static const NSInteger POSE_LEFT_EYE = 2;
+static const NSInteger POSE_LEFT_EYE_OUTER = 3;
+static const NSInteger POSE_RIGHT_EYE_INNER = 4;
+static const NSInteger POSE_RIGHT_EYE = 5;
+static const NSInteger POSE_RIGHT_EYE_OUTER = 6;
+static const NSInteger POSE_LEFT_EAR = 7;
+static const NSInteger POSE_RIGHT_EAR = 8;
+static const NSInteger POSE_MOUTH_LEFT = 9;
+static const NSInteger POSE_MOUTH_RIGHT = 10;
+static const NSInteger POSE_LEFT_SHOULDER = 11;
+static const NSInteger POSE_RIGHT_SHOULDER = 12;
+static const NSInteger POSE_LEFT_ELBOW = 13;
+static const NSInteger POSE_RIGHT_ELBOW = 14;
+static const NSInteger POSE_LEFT_WRIST = 15;
+static const NSInteger POSE_RIGHT_WRIST = 16;
+static const NSInteger POSE_LEFT_PINKY = 17;
+static const NSInteger POSE_RIGHT_PINKY = 18;
+static const NSInteger POSE_LEFT_INDEX = 19;
+static const NSInteger POSE_RIGHT_INDEX = 20;
+static const NSInteger POSE_LEFT_THUMB = 21;
+static const NSInteger POSE_RIGHT_THUMB = 22;
+static const NSInteger POSE_LEFT_HIP = 23;
+static const NSInteger POSE_RIGHT_HIP = 24;
+static const NSInteger POSE_LEFT_KNEE = 25;
+static const NSInteger POSE_RIGHT_KNEE = 26;
+static const NSInteger POSE_LEFT_ANKLE = 27;
+static const NSInteger POSE_RIGHT_ANKLE = 28;
+static const NSInteger POSE_LEFT_HEEL = 29;
+static const NSInteger POSE_RIGHT_HEEL = 30;
+static const NSInteger POSE_LEFT_FOOT = 31;
+static const NSInteger POSE_RIGHT_FOOT = 32;
+
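+// These indices follow MediaPipe's 33-point pose topology. A lookup sketch (the `landmarks`
+// array property on PoseTrackingResults is assumed here, not shown in this header excerpt):
+//   PoseLandmark* nose = results.landmarks[POSE_NOSE];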
+
+
+
@interface PoseLandmark: NSObject
@property float x;
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm
index 744b2dde4..cc1c0ea9b 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTrackingResults.mm
@@ -7,6 +7,8 @@
self.x = x;
self.y = y;
self.z = z;
+ self.presence = presence;
+ self.visibility = visibility;
return self;
}
diff --git a/mediapipe/swift/solutions/lindera/Asensei3D.swift b/mediapipe/swift/solutions/lindera/Asensei3D.swift
new file mode 100644
index 000000000..f2eb03ef2
--- /dev/null
+++ b/mediapipe/swift/solutions/lindera/Asensei3D.swift
@@ -0,0 +1,224 @@
+import Foundation
+
+public struct Asensei3DPose {
+
+
+ public let nose: BodyJointDetails?
+
+ public let leftEyeInner: BodyJointDetails?
+ public let leftEye: BodyJointDetails?
+ public let leftEyeOuter: BodyJointDetails?
+
+ public let rightEyeInner: BodyJointDetails?
+ public let rightEye: BodyJointDetails?
+ public let rightEyeOuter: BodyJointDetails?
+
+ public let leftEar: BodyJointDetails?
+ public let rightEar: BodyJointDetails?
+
+ public let mouthLeft: BodyJointDetails?
+ public let mouthRight: BodyJointDetails?
+
+ public let leftShoulder: BodyJointDetails?
+ public let rightShoulder: BodyJointDetails?
+
+ public let leftElbow: BodyJointDetails?
+ public let rightElbow: BodyJointDetails?
+
+ public let leftWrist: BodyJointDetails?
+ public let rightWrist: BodyJointDetails?
+
+ public let leftPinky: BodyJointDetails?
+ public let rightPinky: BodyJointDetails?
+
+ public let leftIndex: BodyJointDetails?
+ public let rightIndex: BodyJointDetails?
+
+ public let leftThumb: BodyJointDetails?
+ public let rightThumb: BodyJointDetails?
+
+ public let leftHip: BodyJointDetails?
+ public let rightHip: BodyJointDetails?
+
+ public let leftKnee: BodyJointDetails?
+ public let rightKnee: BodyJointDetails?
+
+ public let rightAnkle: BodyJointDetails?
+ public let leftAnkle: BodyJointDetails?
+
+
+ public let rightHeel: BodyJointDetails?
+ public let leftHeel: BodyJointDetails?
+
+ public let rightFoot: BodyJointDetails?
+ public let leftFoot: BodyJointDetails?
+}
+
+extension Asensei3DPose: Encodable {
+
+ private enum CodingKeys: String, CodingKey {
+ case nose
+
+ case leftEyeInner
+ case leftEye
+ case leftEyeOuter
+
+ case rightEyeInner
+ case rightEye
+ case rightEyeOuter
+
+ case leftEar
+ case rightEar
+
+ case mouthLeft
+ case mouthRight
+
+ case leftShoulder
+ case rightShoulder
+
+ case leftElbow
+ case rightElbow
+
+ case leftWrist
+ case rightWrist
+
+ case leftPinky
+ case rightPinky
+
+ case leftIndex
+ case rightIndex
+
+ case leftThumb
+ case rightThumb
+
+ case leftHip
+ case rightHip
+
+ case leftKnee
+ case rightKnee
+
+ case rightAnkle
+ case leftAnkle
+
+
+ case rightHeel
+ case leftHeel
+
+ case rightFoot
+ case leftFoot
+ }
+
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+
+ try container.encodeIfPresent(self.nose, forKey: .nose)
+
+ try container.encodeIfPresent(self.leftEyeInner, forKey: .leftEyeInner)
+ try container.encodeIfPresent(self.leftEye, forKey: .leftEye)
+ try container.encodeIfPresent(self.leftEyeOuter, forKey: .leftEyeOuter)
+
+ try container.encodeIfPresent(self.rightEyeInner, forKey: .rightEyeInner)
+ try container.encodeIfPresent(self.rightEye, forKey: .rightEye)
+ try container.encodeIfPresent(self.rightEyeOuter, forKey: .rightEyeOuter)
+
+ try container.encodeIfPresent(self.leftEar, forKey: .leftEar)
+ try container.encodeIfPresent(self.rightEar, forKey: .rightEar)
+
+ try container.encodeIfPresent(self.mouthLeft, forKey: .mouthLeft)
+ try container.encodeIfPresent(self.mouthRight, forKey: .mouthRight)
+
+ try container.encodeIfPresent(self.leftShoulder, forKey: .leftShoulder)
+ try container.encodeIfPresent(self.rightShoulder, forKey: .rightShoulder)
+
+ try container.encodeIfPresent(self.leftElbow, forKey: .leftElbow)
+ try container.encodeIfPresent(self.rightElbow, forKey: .rightElbow)
+
+ try container.encodeIfPresent(self.leftWrist, forKey: .leftWrist)
+ try container.encodeIfPresent(self.rightWrist, forKey: .rightWrist)
+
+ try container.encodeIfPresent(self.leftPinky, forKey: .leftPinky)
+ try container.encodeIfPresent(self.rightPinky, forKey: .rightPinky)
+
+ try container.encodeIfPresent(self.leftIndex, forKey: .leftIndex)
+ try container.encodeIfPresent(self.rightIndex, forKey: .rightIndex)
+
+ try container.encodeIfPresent(self.leftThumb, forKey: .leftThumb)
+ try container.encodeIfPresent(self.rightThumb, forKey: .rightThumb)
+
+ try container.encodeIfPresent(self.leftHip, forKey: .leftHip)
+ try container.encodeIfPresent(self.rightHip, forKey: .rightHip)
+
+ try container.encodeIfPresent(self.leftKnee, forKey: .leftKnee)
+ try container.encodeIfPresent(self.rightKnee, forKey: .rightKnee)
+
+ try container.encodeIfPresent(self.rightAnkle, forKey: .rightAnkle)
+ try container.encodeIfPresent(self.leftAnkle, forKey: .leftAnkle)
+
+ try container.encodeIfPresent(self.rightHeel, forKey: .rightHeel)
+ try container.encodeIfPresent(self.leftHeel, forKey: .leftHeel)
+
+ try container.encodeIfPresent(self.rightFoot, forKey: .rightFoot)
+ try container.encodeIfPresent(self.leftFoot, forKey: .leftFoot)
+ }
+}
+
+extension Asensei3DPose {
+
+ public struct BodyJointDetails: Encodable {
+
+ public let position: Vector3D
+ public let confidence: Float
+
+ private enum CodingKeys: String, CodingKey {
+ case x
+ case y
+ case z
+ case c
+ }
+
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(self.position.x, forKey: .x)
+ try container.encode(self.position.y, forKey: .y)
+ try container.encode(self.position.z, forKey: .z)
+ try container.encode(self.confidence, forKey: .c)
+ }
+ }
+}
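+
+// Note: each joint is deliberately encoded with the compact keys "x", "y", "z" and "c"
+// (confidence), so a single joint serializes to JSON such as {"x":0.5,"y":0.3,"z":-0.1,"c":0.9}.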
+
+extension Asensei3DPose {
+
+ public struct Vector3D {
+ public let x: Float
+ public let y: Float
+ public let z: Float
+
+ public init(x: Float, y: Float, z: Float) {
+ self.x = x
+ self.y = y
+ self.z = z
+ }
+ }
+}
+
+extension Asensei3DPose {
+
+ public struct Event: Encodable {
+ public let pose: Asensei3DPose
+ let timestamp: TimeInterval
+
+ private enum CodingKeys: String, CodingKey {
+ case bodyJoints
+ case timestamp
+ }
+
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(self.pose, forKey: .bodyJoints)
+ try container.encode(self.timestamp * 1000, forKey: .timestamp)
+ }
+ }
+}
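+
+// A minimal sketch (illustrative only) of how an `Event` can be serialized with the standard
+// `JSONEncoder`; the joint names become keys under "bodyJoints" and the timestamp is encoded
+// in milliseconds:
+//
+// let event = Asensei3DPose.Event(pose: somePose, timestamp: Date().timeIntervalSince1970)
+// let data = try JSONEncoder().encode(event)
+// // => {"bodyJoints":{"nose":{"x":...,"y":...,"z":...,"c":...}, ...},"timestamp":...}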
diff --git a/mediapipe/swift/solutions/lindera/BUILD b/mediapipe/swift/solutions/lindera/BUILD
new file mode 100644
index 000000000..42c4f0851
--- /dev/null
+++ b/mediapipe/swift/solutions/lindera/BUILD
@@ -0,0 +1,26 @@
+load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
+
+swift_library(
+ name = "lindera",
+ srcs = ["Lindera.swift","Asensei3D.swift","utils.swift"],
+ linkopts = [
+ "-lc++",
+ "-std=c++17",
+ "-lstdc++",
+ ],
+ module_name = "LinderaDetection",
+ visibility = ["//visibility:public"],
+ deps = [
+ "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
+ "//mediapipe/objc:mediapipe_framework_ios",
+ "//mediapipe/objc:mediapipe_input_sources_ios",
+ "//mediapipe/objc:mediapipe_layer_renderer",
+ ] + select({
+ "//mediapipe:ios_i386": [],
+ "//mediapipe:ios_x86_64": [],
+ "//conditions:default": [
+ "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
+ "//mediapipe/framework/formats:landmark_cc_proto",
+ ],
+ }),
+)
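+
+# Example (illustrative only): another target could depend on this library and import it from
+# Swift as `import LinderaDetection`, e.g.
+#
+# swift_library(
+# name = "my_pose_app_lib", # hypothetical target
+# srcs = ["MyPoseApp.swift"],
+# deps = ["//mediapipe/swift/solutions/lindera:lindera"],
+# )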
diff --git a/mediapipe/swift/solutions/lindera/Lindera.swift b/mediapipe/swift/solutions/lindera/Lindera.swift
new file mode 100644
index 000000000..2943c2416
--- /dev/null
+++ b/mediapipe/swift/solutions/lindera/Lindera.swift
@@ -0,0 +1,267 @@
+// MPPoseTracking is the Copperlabs pose tracking API, built in Objective-C.
+import MPPoseTracking
+
+import AVFoundation
+import UIKit
+
+
+/// A helper class to run the Pose Tracking API.
+/// The TFLite models are loaded when this class is initialized.
+public final class Lindera {
+
+ // MARK: - Public Class API
+
+
+ // A delegate to handle results
+ public weak var delegate: LinderaDelegate?
+
+ /// Sets a callback that is invoked whenever the measured FPS is updated.
+ public func setFpsDelegate(fpsDelegate: @escaping (_ fps: Double) -> Void) {
+ fpsHelper.onFpsUpdate = fpsDelegate
+ }
+
+ // The camera UIView, which may also render the landmark overlay
+ public var cameraView: UIView {
+ return self.linderaExerciseSession
+ }
+
+
+ // Show or hide landmarks; takes effect immediately.
+ public func showLandmarks(value: Bool) {
+ self.poseTracking.showLandmarks(value)
+ }
+ // Whether landmarks are currently being drawn.
+ public func areLandmarksShown() -> Bool {
+ return self.poseTracking.areLandmarksShown()
+ }
+ // Current model complexity: 0 -> lite, 1 -> full, 2 -> heavy.
+ public func getModelComplexity() -> Int {
+ return Int(self.poseTracking.poseTrackingOptions.modelComplexity)
+ }
+
+ // Set the model complexity and restart detection to load new models
+ public func setModelComplexityNow(complexity:Int){
+ let poseTrackingOptions = poseTracking.poseTrackingOptions
+
+ poseTrackingOptions?.modelComplexity = Int32(complexity)
+
+ poseTracking = PoseTracking(poseTrackingOptions: poseTrackingOptions)
+ startPoseTracking()
+ startCamera()
+
+ }
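+
+ // For example (illustrative): `lindera.setModelComplexityNow(complexity: 2)` switches to the
+ // heavy model and restarts pose tracking and the camera so the new model takes effect.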
+
+ public required init(){
+
+ startPoseTracking()
+ }
+
+
+ public func startCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
+ // set our rendering layer frame according to the cameraView boundary
+ self.poseTracking.renderer.layer.frame = cameraView.layer.bounds
+ // attach the render CALayer to cameraView so output can be drawn onto it
+ self.cameraView.layer.addSublayer(self.poseTracking.renderer.layer)
+
+ self.cameraSource.requestCameraAccess(
+ completionHandler: { (granted: Bool) -> Void in
+ if granted {
+ self.poseTracking.videoQueue.async { [weak self] in
+ self?.cameraSource.start()
+ }
+ completion?(.success(()))
+ } else {
+ completion?(.failure(NSError(
+ domain: "Lindera",
+ code: 1,
+ userInfo: [NSLocalizedDescriptionKey: "Camera access not granted"])))
+ }
+ })
+ }
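+
+ // Note: `cameraView` should be added to the app's view hierarchy (and laid out) before
+ // `startCamera` is called, since the render layer frame is taken from its bounds.
+ // Camera access also requires an `NSCameraUsageDescription` entry in the app's Info.plist.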
+ /// Choose the front or back camera. If the camera is already running, restart it for the change to take effect.
+ public func selectCamera(_ position: AVCaptureDevice.Position, _ completion: ((Result<Void, Error>) -> Void)? = nil) {
+ self.poseTracking.videoQueue.async { [weak self] in
+ self?.cameraSource.cameraPosition = position
+ completion?(.success(()))
+ }
+ }
+
+
+ // MARK: - Private Class Functions
+
+ // Set your custom view here.
+ private lazy var linderaExerciseSession: UIView = {
+ // This will be the main camera view; change it to a custom view class to get the desired results.
+ let liveView = UIView()
+ return liveView
+ }()
+
+
+ private func startPoseTracking(){
+ // set camera preferences
+ self.cameraSource.sessionPreset = AVCaptureSession.Preset.high.rawValue
+ self.cameraSource.cameraPosition = AVCaptureDevice.Position.front
+ self.cameraSource.orientation = AVCaptureVideoOrientation.portrait
+ if (self.cameraSource.orientation == AVCaptureVideoOrientation.portrait){
+ self.cameraSource.videoMirrored = true
+ }
+ // call LinderaDelegate on pose tracking results
+ self.poseTracking.poseTrackingResultsListener = {[weak self] results in
+
+
+ guard let self = self, let results = results else {
+ return
+ }
+
+ self.delegate?.lindera(self, didDetect: .init(pose: Asensei3DPose.init(results), timestamp: CMTimeGetSeconds(self.poseTracking.timeStamp)))
+ }
+ self.poseTracking.graphOutputStreamListener = {[weak self] in
+ self?.fpsHelper.logTime()
+ }
+
+ self.poseTracking.startGraph()
+ // attach the camera's output to the poseTracking object, delivering frames on its videoQueue
+ self.cameraSource.setDelegate(self.poseTracking, queue: self.poseTracking.videoQueue)
+ }
+
+
+ func stopCamera(){
+ if (self.cameraSource.isRunning){
+ self.poseTracking.videoQueue.async { [weak self] in
+ self?.cameraSource.stop()
+ }
+
+ }
+ }
+
+ /// Switches the camera between the front and back positions, then restarts it.
+ func switchCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
+ self.poseTracking.videoQueue.async { [weak self] in
+ guard let self = self else { return }
+
+ self.stopCamera()
+
+ switch self.cameraSource.cameraPosition {
+ case .back:
+ self.selectCamera(.front)
+ case .front:
+ self.selectCamera(.back)
+ case .unspecified:
+ completion?(.failure(NSError(
+ domain: "Lindera",
+ code: 2,
+ userInfo: [NSLocalizedDescriptionKey: "Unknown camera position"])))
+ return
+ @unknown default:
+ completion?(.failure(NSError(
+ domain: "Lindera",
+ code: 2,
+ userInfo: [NSLocalizedDescriptionKey: "Unknown camera position"])))
+ return
+ }
+
+ self.startCamera(completion)
+ }
+ }
+
+
+
+ // MARK: - Private Class Objects
+ // Initialize the PoseTracking API and load the models.
+ var poseTracking: PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true, modelComplexity: 1))
+
+ // Computes a smoothed FPS estimate for the model.
+ let fpsHelper = FPSHelper(smoothingFactor: 0.95)
+
+ // The MediaPipe camera input helper that feeds frames to this class.
+ let cameraSource = MPPCameraInputSource()
+
+
+
+}
+
+
+public protocol LinderaDelegate: AnyObject {
+
+ func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event)
+}
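+
+// Example usage (illustrative sketch, not part of the API): a hypothetical view controller
+// could create a `Lindera` instance, show its camera view, and receive pose events through
+// `LinderaDelegate`.
+//
+// final class PoseViewController: UIViewController, LinderaDelegate {
+// let lindera = Lindera()
+//
+// override func viewDidLoad() {
+// super.viewDidLoad()
+// lindera.delegate = self
+// lindera.cameraView.frame = view.bounds
+// view.addSubview(lindera.cameraView)
+// lindera.startCamera()
+// lindera.setFpsDelegate { fps in print("fps: \(fps)") }
+// }
+//
+// func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
+// if let nose = event.pose.nose {
+// print("nose at (\(nose.position.x), \(nose.position.y), \(nose.position.z))")
+// }
+// }
+// }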
+
+
+/// Converts a PoseLandmark from the pose tracking API into a BodyJointDetails.
+func landmarkToBodyJointDetails(landmark: PoseLandmark) -> Asensei3DPose.BodyJointDetails {
+ return Asensei3DPose.BodyJointDetails(position: .init(x: landmark.x, y: landmark.y, z: landmark.z), confidence: landmark.visibility)
+}
+// MARK: - Helpers
+extension Asensei3DPose {
+
+ init(_ pose: PoseTrackingResults) {
+
+ self.nose = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_NOSE])
+
+ self.leftEyeInner = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE_INNER])
+ self.leftEye = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE])
+ self.leftEyeOuter = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE_OUTER])
+
+ self.rightEyeInner = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE_INNER])
+ self.rightEye = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE])
+ self.rightEyeOuter = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE_OUTER])
+
+ self.leftEar = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EAR])
+ self.rightEar = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EAR])
+
+ self.mouthLeft = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_MOUTH_LEFT])
+ self.mouthRight = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_MOUTH_RIGHT])
+
+ self.leftShoulder = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_SHOULDER])
+ self.rightShoulder = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_SHOULDER])
+
+ self.leftElbow = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_ELBOW])
+ self.rightElbow = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_ELBOW])
+
+ self.leftWrist = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_WRIST])
+ self.rightWrist = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_WRIST])
+
+ self.leftPinky = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_PINKY])
+ self.rightPinky = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_PINKY])
+
+ self.leftIndex = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_INDEX])
+ self.rightIndex = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_INDEX])
+
+ self.leftThumb = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_THUMB])
+ self.rightThumb = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_THUMB])
+
+ self.leftHip = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_HIP])
+ self.rightHip = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_HIP])
+
+ self.leftKnee = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_KNEE])
+ self.rightKnee = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_KNEE])
+
+ self.rightAnkle = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_ANKLE])
+ self.leftAnkle = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_ANKLE])
+
+ self.rightHeel = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_HEEL])
+ self.leftHeel = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_HEEL])
+
+ self.rightFoot = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_FOOT])
+ self.leftFoot = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_FOOT])
+ }
+}
+
+//extension Asensei3DPose.Vector3D {
+//
+// init(_ vector: Lindera3DVector) {
+// self.x = -vector.x
+// self.y = vector.z
+// self.z = vector.y
+// }
+//}
diff --git a/mediapipe/swift/solutions/lindera/utils.swift b/mediapipe/swift/solutions/lindera/utils.swift
new file mode 100644
index 000000000..846d34214
--- /dev/null
+++ b/mediapipe/swift/solutions/lindera/utils.swift
@@ -0,0 +1,45 @@
+//
+// utils.swift
+// Mediapipe
+//
+// Created by Mautisim Munir on 21/10/2022.
+//
+
+import Foundation
+
+
+/// Computes an exponentially smoothed frames-per-second estimate from successive calls to `logTime()`.
+public class FPSHelper {
+ var smoothingFactor = 0.8
+ var _fps: Double? = nil
+ var time: CFAbsoluteTime? = nil
+ public var onFpsUpdate: ((_ fps: Double) -> Void)? = nil
+
+ init(smoothingFactor: Double) {
+ self.smoothingFactor = smoothingFactor
+ }
+
+ /// Call once per processed frame; updates the smoothed FPS and notifies `onFpsUpdate`.
+ public func logTime() {
+ let currTime = CFAbsoluteTimeGetCurrent()
+ if let time = time {
+ let elapsedTime = currTime - time
+ let fps = 1 / Double(elapsedTime)
+ if let previous = _fps {
+ // Exponential moving average: weight the previous estimate by the smoothing factor.
+ _fps = (1 - smoothingFactor) * fps + smoothingFactor * previous
+ } else {
+ _fps = fps
+ }
+ if let fpsValue = _fps {
+ onFpsUpdate?(fpsValue)
+ }
+ }
+ time = currTime
+ }
+}
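+
+// Worked example (illustrative): with smoothingFactor = 0.95, a previous estimate of 30 FPS and a
+// new instantaneous measurement of 20 FPS, the update gives
+// 0.05 * 20 + 0.95 * 30 = 29.5 FPS,
+// so single slow frames only nudge the reported value.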