Merge branch 'android-demo-app' of github.com:udamaster/mediapipe into android-demo-app

commit 4be052917a

WORKSPACE (35 lines changed)
@@ -316,17 +316,23 @@ http_archive(

# iOS basic build deps.

#http_archive(
#    name = "build_bazel_rules_apple",
#    patch_args = [
#        "-p1",
#    ],
#    patches = [
#        # Bypass checking ios unit test runner when building MP ios applications.
#        "@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff",
#    ],
#    sha256 = "77e8bf6fda706f420a55874ae6ee4df0c9d95da6c7838228b26910fc82eea5a2",
#    url = "https://github.com/bazelbuild/rules_apple/releases/download/0.32.0/rules_apple.0.32.0.tar.gz",
#)

http_archive(
    name = "build_bazel_rules_apple",
    patch_args = [
        "-p1",
    ],
    patches = [
        # Bypass checking ios unit test runner when building MP ios applications.
        "@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff",
    ],
    sha256 = "77e8bf6fda706f420a55874ae6ee4df0c9d95da6c7838228b26910fc82eea5a2",
    url = "https://github.com/bazelbuild/rules_apple/releases/download/0.32.0/rules_apple.0.32.0.tar.gz",
    sha256 = "90e3b5e8ff942be134e64a83499974203ea64797fd620eddeb71b3a8e1bff681",
    url = "https://github.com/bazelbuild/rules_apple/releases/download/1.1.2/rules_apple.1.1.2.tar.gz",
)

load(

@@ -343,6 +349,13 @@ load(

swift_rules_dependencies()

load(
    "@build_bazel_rules_swift//swift:extras.bzl",
    "swift_rules_extra_dependencies",
)

swift_rules_extra_dependencies()

http_archive(
    name = "build_bazel_apple_support",
    sha256 = "741366f79d900c11e11d8efd6cc6c66a31bfb2451178b58e0b5edc6f1db17b35",

@@ -515,11 +528,11 @@ load("@build_bazel_rules_android//android:rules.bzl", "android_ndk_repository",

android_sdk_repository(
    name = "androidsdk",
    build_tools_version = "30.0.3",
    # path = "/Users/tj/Library/Android/sdk",  # Path to Android SDK, optional if $ANDROID_HOME is set
)

android_ndk_repository(
    name = "androidndk",  # Required. Name *must* be "androidndk".
    api_level = 21,
    # path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529",  # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
)
@@ -23,7 +23,8 @@

      "mediapipe/objc/testing/app/BUILD"
    ],
    "buildTargets" : [
      "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp"
      "//mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera:posetracking-lindera",
      "//mediapipe/swift/solutions/lindera:lindera"
    ],
    "optionSet" : {
      "BazelBuildOptionsDebug" : {

@@ -90,7 +91,14 @@

      "mediapipe/examples/ios/iristrackinggpu",
      "mediapipe/examples/ios/objectdetectioncpu",
      "mediapipe/examples/ios/objectdetectiongpu",
      "mediapipe/examples/ios/posetracking-lindera",
      "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera",
      "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj",
      "mediapipe/examples/ios/posetrackinggpu",
      "mediapipe/examples/ios/posetrackingsolution-swift",
      "mediapipe/examples/ios/posetrackingsolution-swift/Camera",
      "mediapipe/examples/ios/posetrackingsolution-swift/ViewModels",
      "mediapipe/examples/ios/posetrackingsolution-swift/Views",
      "mediapipe/examples/ios/posetrackingsolutiongpu",
      "mediapipe/examples/ios/posetrackingsolutiongpu/Base.lproj",
      "mediapipe/examples/ios/selfiesegmentationgpu",

@@ -117,6 +125,9 @@

      "mediapipe/objc",
      "mediapipe/objc/solutions",
      "mediapipe/objc/solutions/posetracking_gpu",
      "mediapipe/swift",
      "mediapipe/swift/solutions",
      "mediapipe/swift/solutions/lindera",
      "mediapipe/util",
      "mediapipe/util/android",
      "mediapipe/util/android/file",
@@ -2,7 +2,7 @@

  "configDefaults" : {
    "optionSet" : {
      "BazelBuildOptionsDebug" : {
        "p" : "--config=debug --strip=never --features=oso_prefix_is_pwd --apple_generate_dsym"
        "p" : "--strip=never --features=oso_prefix_is_pwd --apple_generate_dsym"
      },
      "CLANG_CXX_LANGUAGE_STANDARD" : {
        "p" : "c++14"

@@ -24,11 +24,13 @@

    "mediapipe/examples/ios/objectdetectioncpu",
    "mediapipe/examples/ios/objectdetectiongpu",
    "mediapipe/examples/ios/objectdetectiontrackinggpu",
    "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera",
    "mediapipe/examples/ios/posetrackinggpu",
    "mediapipe/examples/ios/posetrackingsolutiongpu",
    "mediapipe/examples/ios/selfiesegmentationgpu",
    "mediapipe/objc",
    "mediapipe/objc/solutions/posetracking_gpu"
    "mediapipe/objc/solutions/posetracking_gpu",
    "mediapipe/swift/solutions/lindera"
  ],
  "projectName" : "Mediapipe",
  "workspaceRoot" : "../.."
@@ -0,0 +1,22 @@

//
//  AppDelegate.swift
//  PoseTrackingLindera
//
//  Created by Mautisim Munir on 17/10/2022.
//

import UIKit

@main
class AppDelegate: UIResponder, UIApplicationDelegate {

    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }

}
@@ -0,0 +1,11 @@

{
  "colors" : [
    { "idiom" : "universal" }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,93 @@

{
  "images" : [
    { "idiom" : "iphone", "scale" : "2x", "size" : "20x20" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "20x20" },
    { "idiom" : "iphone", "scale" : "2x", "size" : "29x29" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "29x29" },
    { "idiom" : "iphone", "scale" : "2x", "size" : "40x40" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "40x40" },
    { "idiom" : "iphone", "scale" : "2x", "size" : "60x60" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "60x60" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "20x20" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "20x20" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "29x29" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "29x29" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "40x40" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "40x40" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "76x76" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "83.5x83.5" },
    { "idiom" : "ios-marketing", "scale" : "1x", "size" : "1024x1024" }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,6 @@

{
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,81 @@

load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application")
load("@build_bazel_rules_apple//apple:resources.bzl", "apple_resource_bundle")
load(
    "//mediapipe/examples/ios:bundle_id.bzl",
    "BUNDLE_ID_PREFIX",
    "example_provisioning",
)

MIN_IOS_VERSION = "14.0"

swift_library(
    name = "lindera_app_lib",
    srcs = glob(["**/*.swift"]),
    data = [
        "Base.lproj/LaunchScreen.storyboard",
        "Base.lproj/Main.storyboard",
    ],
    linkopts = [
        "-lc++",
        "-std=c++17",
        "-lstdc++",
    ],
    module_name = "lindera_app_lib",
    visibility = ["//visibility:public"],
    deps = [
        "@ios_opencv//:OpencvFramework",
    ] + [
        "//mediapipe/swift/solutions/lindera:lindera",
    ],
    # +
    # [
    #     "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
    #     "//mediapipe/objc:mediapipe_framework_ios",
    #     "//mediapipe/objc:mediapipe_input_sources_ios",
    #     "//mediapipe/objc:mediapipe_layer_renderer",
    # ] + select({
    #     "//mediapipe:ios_i386": [],
    #     "//mediapipe:ios_x86_64": [],
    #     "//conditions:default": [
    #         "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
    #         "//mediapipe/framework/formats:landmark_cc_proto",
    #     ],
    # })
)

apple_resource_bundle(
    name = "app_resources",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    resources = [
        "Assets.xcassets",
    ],
    visibility = ["//visibility:public"],
)

ios_application(
    name = "posetracking-lindera",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    families = [
        "iphone",
        "ipad",
    ],
    infoplists = [
        "Info.plist",
        "//mediapipe/examples/ios/common:Info.plist",
    ],
    linkopts = [
        "-lc++",
    ],
    minimum_os_version = MIN_IOS_VERSION,
    provisioning_profile = example_provisioning(),
    resources = [":app_resources"],
    visibility = ["//visibility:public"],
    deps = [
        ":lindera_app_lib",
        ":app_resources",
        # "@ios_opencv//:OpencvFramework",
    ],
)
@@ -0,0 +1,32 @@

<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
    <device id="retina6_1" orientation="portrait" appearance="light"/>
    <dependencies>
        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
        <capability name="Safe area layout guides" minToolsVersion="9.0"/>
        <capability name="System colors in document resources" minToolsVersion="11.0"/>
        <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
    </dependencies>
    <scenes>
        <!--View Controller-->
        <scene sceneID="EHf-IW-A2E">
            <objects>
                <viewController id="01J-lp-oVM" sceneMemberID="viewController">
                    <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
                        <rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
                        <viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
                        <color key="backgroundColor" systemColor="systemBackgroundColor"/>
                    </view>
                </viewController>
                <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
            </objects>
            <point key="canvasLocation" x="53" y="375"/>
        </scene>
    </scenes>
    <resources>
        <systemColor name="systemBackgroundColor">
            <color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
        </systemColor>
    </resources>
</document>
@@ -0,0 +1,84 @@

<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
    <device id="retina6_1" orientation="portrait" appearance="light"/>
    <dependencies>
        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
        <capability name="Safe area layout guides" minToolsVersion="9.0"/>
        <capability name="System colors in document resources" minToolsVersion="11.0"/>
        <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
    </dependencies>
    <scenes>
        <!--View Controller-->
        <scene sceneID="tne-QT-ifu">
            <objects>
                <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
                        <rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
                        <subviews>
                            <view contentMode="scaleToFill" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="1BO-kg-lOt">
                                <rect key="frame" x="0.0" y="0.0" width="414" height="125"/>
                                <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
                                <subviews>
                                    <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="dv5-h1-tjP">
                                        <rect key="frame" x="8" y="86" width="200" height="31"/>
                                        <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
                                        <state key="normal" title="Button"/>
                                        <buttonConfiguration key="configuration" style="plain" title="LANDMARKS (ON)" titleAlignment="center"/>
                                        <connections>
                                            <action selector="showLandmarksButtonTouchWithSender:" destination="BYZ-38-t0r" eventType="touchDown" id="rE9-Y7-U5g"/>
                                        </connections>
                                    </button>
                                    <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="oaC-Ax-V0a">
                                        <rect key="frame" x="216" y="86" width="190" height="31"/>
                                        <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
                                        <state key="normal" title="Button"/>
                                        <buttonConfiguration key="configuration" style="plain" title="MODEL (LITE)"/>
                                        <connections>
                                            <action selector="setModelComplexity" destination="BYZ-38-t0r" eventType="touchDown" id="cVM-E4-dua"/>
                                        </connections>
                                    </button>
                                    <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" text=" Copper Labs" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontForContentSizeCategory="YES" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="UGR-Ho-hUQ">
                                        <rect key="frame" x="8" y="41" width="398" height="37"/>
                                        <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
                                        <fontDescription key="fontDescription" style="UICTFontTextStyleTitle0"/>
                                        <color key="textColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                                        <nil key="highlightedColor"/>
                                    </label>
                                </subviews>
                                <color key="backgroundColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                                <color key="tintColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
                            </view>
                            <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" text="35 fps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" highlighted="YES" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="hSQ-so-ykH">
                                <rect key="frame" x="20" y="133" width="374" height="33"/>
                                <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
                                <fontDescription key="fontDescription" name="Rockwell-Regular" family="Rockwell" pointSize="20"/>
                                <color key="textColor" systemColor="secondarySystemGroupedBackgroundColor"/>
                                <nil key="highlightedColor"/>
                            </label>
                        </subviews>
                        <viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
                        <color key="backgroundColor" systemColor="systemBackgroundColor"/>
                    </view>
                    <connections>
                        <outlet property="chooseModelButton" destination="oaC-Ax-V0a" id="Knp-ll-Zgu"/>
                        <outlet property="fpsLabel" destination="hSQ-so-ykH" id="sw5-ik-ro9"/>
                        <outlet property="liveView" destination="8bC-Xf-vdC" id="COw-5j-lAL"/>
                        <outlet property="showLandmarksButton" destination="dv5-h1-tjP" id="xXW-UG-aSR"/>
                        <outlet property="titleview" destination="1BO-kg-lOt" id="uP4-0G-Gix"/>
                    </connections>
                </viewController>
                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
            </objects>
            <point key="canvasLocation" x="28.985507246376812" y="42.857142857142854"/>
        </scene>
    </scenes>
    <resources>
        <systemColor name="secondarySystemGroupedBackgroundColor">
            <color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
        </systemColor>
        <systemColor name="systemBackgroundColor">
            <color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
        </systemColor>
    </resources>
</document>
@@ -0,0 +1,35 @@

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>UIApplicationSceneManifest</key>
    <dict>
        <key>NSCameraUsageDescription</key>
        <string>This app uses the camera to demonstrate live video processing.</string>
        <key>CFBundleDevelopmentRegion</key>
        <string>en</string>
        <key>CFBundleExecutable</key>
        <string>$(EXECUTABLE_NAME)</string>
        <key>CFBundleIdentifier</key>
        <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
        <key>CFBundleInfoDictionaryVersion</key>
        <string>6.0</string>
        <key>CFBundleName</key>
        <string>$(PRODUCT_NAME)</string>
        <key>CFBundlePackageType</key>
        <string>APPL</string>
        <key>CFBundleShortVersionString</key>
        <string>1.0</string>
        <key>CFBundleVersion</key>
        <string>1</string>
        <key>LSRequiresIPhoneOS</key>
        <true/>
        <key>UIApplicationSupportsMultipleScenes</key>
        <false/>
        <key>MainViewController</key>
        <string>ViewController</string>
        <key>UILaunchStoryboardName</key>
        <string>LaunchScreen</string>
    </dict>
</dict>
</plist>
@@ -0,0 +1,164 @@

//
//  ViewController.swift
//  PoseTrackingLindera
//
//  Created by Mautisim Munir on 17/10/2022.
//

import UIKit
import LinderaDetection

class ViewController: UIViewController {

    // MARK: - UI Elements

    @IBOutlet var liveView: UIView!
    @IBOutlet var showLandmarksButton: UIButton!
    @IBOutlet var chooseModelButton: UIButton!
    @IBOutlet var titleview: UIView!
    @IBOutlet var fpsLabel: UILabel!

    // MARK: - UI Actions

    @IBAction func setModelComplexity() {
        let alert = UIAlertController(
            title: nil,
            message: nil,
            preferredStyle: .actionSheet
        )

        alert.addAction(
            .init(title: "MODEL (LITE)", style: .default) { [weak self] _ in
                self?.lindera.setModelComplexityNow(complexity: 0)
                self?.updateModelButtonText()
            }
        )

        alert.addAction(
            .init(title: "MODEL (FULL)", style: .default) { [weak self] _ in
                self?.lindera.setModelComplexityNow(complexity: 1)
                self?.updateModelButtonText()
            }
        )

        alert.addAction(
            .init(title: "MODEL (HEAVY)", style: .default) { [weak self] _ in
                self?.lindera.setModelComplexityNow(complexity: 2)
                self?.updateModelButtonText()
            }
        )

        present(alert, animated: true)
    }

    @IBAction func showLandmarksButtonTouch(sender: UIButton) {
        lindera.showLandmarks(value: !lindera.areLandmarksShown())
        updateLandmarksButtonText()
    }

    // MARK: - LinderaDelegate

    /// A simple LinderaDelegate implementation that prints nose coordinates if detected.
    class LinderaDelegateImpl: LinderaDelegate {
        func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
            // if let kpt = event.pose.nose {
            //     // Printing causes large drops in FPS
            //     print("LinderaDelegateImpl: Nose Keypoint (\(String(describing: kpt.position.x)),\(String(describing: kpt.position.y)),\(kpt.position.z)) with confidence \(kpt.confidence)")
            // }
        }
    }

    // MARK: - UI Text Modifications

    func updateLandmarksButtonText() {
        if lindera.areLandmarksShown() {
            showLandmarksButton.setTitle("LANDMARKS (ON)", for: UIControl.State.normal)
        } else {
            showLandmarksButton.setTitle("LANDMARKS (OFF)", for: UIControl.State.normal)
        }
    }

    func updateModelButtonText() {
        var text = "MODEL "
        switch lindera.getModelComplexity() {
        case 0:
            text += "(LITE)"
        case 1:
            text += "(FULL)"
        case 2:
            text += "(HEAVY)"
        default:
            text += "(Unknown)"
        }
        chooseModelButton.setTitle(text, for: UIControl.State.normal)
    }

    // MARK: - State Objects

    let lindera = Lindera()
    let linderaDelegate = LinderaDelegateImpl()

    // MARK: - UI Setup

    override func viewDidLoad() {
        super.viewDidLoad()

        self.lindera.delegate = linderaDelegate

        if let view = self.liveView {
            // Add the Lindera camera view to our app's UIView, i.e. liveView.
            view.addSubview(lindera.cameraView)
            // Expand the cameraView frame to the liveView frame.
            self.lindera.cameraView.frame = view.bounds

            // Set up constraints (not strictly necessary given the frame assignment above).
            self.lindera.cameraView.translatesAutoresizingMaskIntoConstraints = false
            NSLayoutConstraint.activate([
                self.lindera.cameraView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
                self.lindera.cameraView.topAnchor.constraint(equalTo: view.topAnchor),
                self.lindera.cameraView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
                self.lindera.cameraView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
            ])
        }

        // This closure is called whenever there is an fps update.
        self.lindera.setFpsDelegate(fpsDelegate: { [weak self] fps in
            DispatchQueue.main.async {
                self?.fpsLabel.text = "\(Int(fps)) fps"
            }
        })

        // Otherwise they are hidden behind the camera view.
        self.liveView.bringSubviewToFront(titleview)
        self.liveView.bringSubviewToFront(fpsLabel)

        // Make the Landmarks and Model button text reflect the state in the lindera object.
        updateLandmarksButtonText()
        updateModelButtonText()

        lindera.startCamera()
    }
}
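Note: the delegate in the controller above only ships with its body commented out. As a rough sketch (not part of this commit), a LinderaDelegate that actually consumes a keypoint could look like the following; the field names (event.pose.nose, position, confidence) are taken from the commented-out print statement above and are otherwise an assumption:

import LinderaDetection

/// Sketch of a delegate that reads the nose keypoint.
/// Avoid printing on every frame in real use; as the comment above notes, it causes large FPS drops.
final class NoseKeypointDelegate: LinderaDelegate {
    func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
        guard let nose = event.pose.nose else { return }
        print("nose at (\(nose.position.x), \(nose.position.y), \(nose.position.z)) confidence \(nose.confidence)")
    }
}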
@@ -0,0 +1,11 @@

{
  "colors" : [
    { "idiom" : "universal" }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,98 @@

{
  "images" : [
    { "idiom" : "iphone", "scale" : "2x", "size" : "20x20" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "20x20" },
    { "idiom" : "iphone", "scale" : "2x", "size" : "29x29" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "29x29" },
    { "idiom" : "iphone", "scale" : "2x", "size" : "40x40" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "40x40" },
    { "idiom" : "iphone", "scale" : "2x", "size" : "60x60" },
    { "idiom" : "iphone", "scale" : "3x", "size" : "60x60" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "20x20" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "20x20" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "29x29" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "29x29" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "40x40" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "40x40" },
    { "idiom" : "ipad", "scale" : "1x", "size" : "76x76" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "76x76" },
    { "idiom" : "ipad", "scale" : "2x", "size" : "83.5x83.5" },
    { "idiom" : "ios-marketing", "scale" : "1x", "size" : "1024x1024" }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,6 @@

{
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,21 @@

{
  "images" : [
    { "filename" : "man-selfie.jpg", "idiom" : "universal", "scale" : "1x" },
    { "idiom" : "universal", "scale" : "2x" },
    { "idiom" : "universal", "scale" : "3x" }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

man-selfie.jpg: binary file not shown (2.1 MiB)
@@ -0,0 +1,21 @@

{
  "images" : [
    { "filename" : "test-people.jpg", "idiom" : "universal", "scale" : "1x" },
    { "idiom" : "universal", "scale" : "2x" },
    { "idiom" : "universal", "scale" : "3x" }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

test-people.jpg: binary file not shown (277 KiB)
@@ -0,0 +1,73 @@

load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application")
load("@build_bazel_rules_apple//apple:resources.bzl", "apple_resource_bundle")
load(
    "//mediapipe/examples/ios:bundle_id.bzl",
    "BUNDLE_ID_PREFIX",
    "example_provisioning",
)

MIN_IOS_VERSION = "14.0"

swift_library(
    name = "app_lib",
    srcs = glob(["**/*.swift"]),
    data = [
        "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
        "//mediapipe/modules/pose_detection:pose_detection.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
    ],
    linkopts = [
        "-lc++",
        "-std=c++17",
        "-lstdc++",
    ],
    module_name = "app_lib",
    visibility = ["//visibility:public"],
    deps = [
        "@ios_opencv//:OpencvFramework",
    ] + [
        "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
        "//mediapipe/objc:mediapipe_framework_ios",
        "//mediapipe/objc:mediapipe_input_sources_ios",
        "//mediapipe/objc:mediapipe_layer_renderer",
    ] + select({
        "//mediapipe:ios_i386": [],
        "//mediapipe:ios_x86_64": [],
        "//conditions:default": [
            "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
            "//mediapipe/framework/formats:landmark_cc_proto",
        ],
    }),
)

apple_resource_bundle(
    name = "app_resources",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    resources = [
        "Assets.xcassets",
    ],
    visibility = ["//visibility:public"],
)

ios_application(
    name = "posetracking-solution-swift",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    families = [
        "iphone",
        "ipad",
    ],
    infoplists = ["Info.plist"],
    linkopts = [
        "-lc++",
    ],
    minimum_os_version = MIN_IOS_VERSION,
    provisioning_profile = example_provisioning(),
    resources = [":app_resources"],
    visibility = ["//visibility:public"],
    deps = [
        ":app_lib",
        ":app_resources",
        # "@ios_opencv//:OpencvFramework",
    ],
)
@@ -0,0 +1,32 @@

import Foundation

enum CameraError: Error {
    case cameraUnavailable
    case cannotAddInput
    case cannotAddOutput
    case createCaptureInput(Error)
    case deniedAuthorization
    case restrictedAuthorization
    case unknownAuthorization
}

extension CameraError: LocalizedError {
    var errorDescription: String? {
        switch self {
        case .cameraUnavailable:
            return "Camera unavailable"
        case .cannotAddInput:
            return "Cannot add capture input to session"
        case .cannotAddOutput:
            return "Cannot add video output to session"
        case .createCaptureInput(let error):
            return "Creating capture input for camera: \(error.localizedDescription)"
        case .deniedAuthorization:
            return "Camera access denied"
        case .restrictedAuthorization:
            return "Attempting to access a restricted capture device"
        case .unknownAuthorization:
            return "Unknown authorization status for capture device"
        }
    }
}
@@ -0,0 +1,142 @@

import AVFoundation

// 1
class CameraManager: ObservableObject {

    // 1
    @Published var error: CameraError?
    // 2
    let session = AVCaptureSession()
    // 3
    private let sessionQueue = DispatchQueue(label: "com.raywenderlich.SessionQ")
    // 4
    private let videoOutput = AVCaptureVideoDataOutput()
    // 5
    private var status = Status.unconfigured

    // 2
    enum Status {
        case unconfigured
        case configured
        case unauthorized
        case failed
    }
    // 3
    static let shared = CameraManager()
    // 4
    private init() {
        configure()
    }
    // 5
    private func configure() {
        checkPermissions()
        sessionQueue.async {
            self.configureCaptureSession()
            self.session.startRunning()
        }
    }

    func set(
        _ delegate: AVCaptureVideoDataOutputSampleBufferDelegate,
        queue: DispatchQueue
    ) {
        sessionQueue.async {
            self.videoOutput.setSampleBufferDelegate(delegate, queue: queue)
        }
    }

    private func set(error: CameraError?) {
        DispatchQueue.main.async {
            self.error = error
        }
    }

    private func checkPermissions() {
        // 1
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            // 2
            sessionQueue.suspend()
            AVCaptureDevice.requestAccess(for: .video) { authorized in
                // 3
                if !authorized {
                    self.status = .unauthorized
                    self.set(error: .deniedAuthorization)
                }
                self.sessionQueue.resume()
            }
        // 4
        case .restricted:
            status = .unauthorized
            set(error: .restrictedAuthorization)
        case .denied:
            status = .unauthorized
            set(error: .deniedAuthorization)
        // 5
        case .authorized:
            break
        // 6
        @unknown default:
            status = .unauthorized
            set(error: .unknownAuthorization)
        }
    }

    private func configureCaptureSession() {
        guard status == .unconfigured else {
            return
        }
        session.beginConfiguration()
        defer {
            session.commitConfiguration()
        }
        let device = AVCaptureDevice.default(
            .builtInWideAngleCamera,
            for: .video,
            position: .front)
        guard let camera = device else {
            set(error: .cameraUnavailable)
            status = .failed
            return
        }

        do {
            // 1
            let cameraInput = try AVCaptureDeviceInput(device: camera)
            // 2
            if session.canAddInput(cameraInput) {
                session.addInput(cameraInput)
            } else {
                // 3
                set(error: .cannotAddInput)
                status = .failed
                return
            }
        } catch {
            // 4
            set(error: .createCaptureInput(error))
            status = .failed
            return
        }
        // 1
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            // 2
            videoOutput.videoSettings =
                [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
            // 3
            let videoConnection = videoOutput.connection(with: .video)
            videoConnection?.videoOrientation = .portrait
        } else {
            // 4
            set(error: .cannotAddOutput)
            status = .failed
            return
        }
    }

}
@@ -0,0 +1,33 @@

import AVFoundation

// 1
class FrameManager: NSObject, ObservableObject {
    // 2
    static let shared = FrameManager()
    // 3
    @Published var current: CVPixelBuffer?
    // 4
    let videoOutputQueue = DispatchQueue(
        label: "com.raywenderlich.VideoOutputQ",
        qos: .userInitiated,
        attributes: [],
        autoreleaseFrequency: .workItem)
    // 5
    private override init() {
        super.init()
        CameraManager.shared.set(self, queue: videoOutputQueue)
    }
}

extension FrameManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        if let buffer = sampleBuffer.imageBuffer {
            DispatchQueue.main.async {
                self.current = buffer
            }
        }
    }
}
@@ -0,0 +1,35 @@

import SwiftUI
import MPPoseTracking

struct ContentView: View {
    @StateObject private var model = ContentViewModel()

    let poseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true))

    init() {
        poseTracking?.renderer.layer.frame = self.body.layer
    }

    var body: some View {
        VStack{
            FrameView(image: model.frame)
                .edgesIgnoringSafeArea(.all)
            // buildInferenceView()
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
@@ -0,0 +1,47 @@

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>NSPhotoLibraryUsageDescription</key>
    <string>We need to access your library</string>
    <key>CFBundleDevelopmentRegion</key>
    <string>$(DEVELOPMENT_LANGUAGE)</string>
    <key>CFBundleExecutable</key>
    <string>$(EXECUTABLE_NAME)</string>
    <key>CFBundleIdentifier</key>
    <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
    <key>CFBundleInfoDictionaryVersion</key>
    <string>6.0</string>
    <key>CFBundleName</key>
    <string>$(PRODUCT_NAME)</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>CFBundleShortVersionString</key>
    <string>1.0</string>
    <key>CFBundleVersion</key>
    <string>1</string>
    <key>LSRequiresIPhoneOS</key>
    <true/>
    <key>NSCameraUsageDescription</key>
    <string>We need to access your Camera</string>
    <key>UIApplicationSupportsIndirectInputEvents</key>
    <true/>
    <key>UIRequiredDeviceCapabilities</key>
    <array>
        <string>armv7</string>
    </array>
    <key>UISupportedInterfaceOrientations</key>
    <array>
        <string>UIInterfaceOrientationPortrait</string>
        <string>UIInterfaceOrientationLandscapeLeft</string>
        <string>UIInterfaceOrientationLandscapeRight</string>
    </array>
    <key>UISupportedInterfaceOrientations~ipad</key>
    <array>
        <string>UIInterfaceOrientationPortrait</string>
        <string>UIInterfaceOrientationPortraitUpsideDown</string>
        <string>UIInterfaceOrientationLandscapeLeft</string>
        <string>UIInterfaceOrientationLandscapeRight</string>
    </array>
</dict>
</plist>
@@ -0,0 +1,17 @@

//
//  ModelsLabTestApp.swift
//  ModelsLabTest
//
//  Created by Mautisim Munir on 02/06/2022.
//

import SwiftUI

@main
struct ModelsLabTestApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
@@ -0,0 +1,6 @@

{
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@@ -0,0 +1,60 @@

//
//  ContentViewModel.swift
//  ModelsLabTest
//
//  Created by Mautisim Munir on 12/06/2022.
//

import Foundation
import CoreImage
import UIKit
import SwiftUI
import MPPoseTracking

class ContentViewModel: ObservableObject {
    // 1
    @Published var frame: CGImage?
    // 2
    private let frameManager = FrameManager.shared
    var counter = 0

    // let modelPath = Bundle.main.path(forResource: "model", ofType: "edgem")!
    // let model:EdgeModel

    init() {
        // model = EdgeModel(modelPath: modelPath)
        setupSubscriptions()
    }
    // 3
    func setupSubscriptions() {
        // 1
        frameManager.$current
            // 2
            .receive(on: RunLoop.main)
            // 3
            .compactMap { buffer in
                if buffer != nil {
                    let ciContext = CIContext()
                    let ciImage = CIImage(cvImageBuffer: buffer!)
                    let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent)
                    return cgImage
                }
                return nil
            }
            .assign(to: &$frame)
    }
}
@@ -0,0 +1,41 @@

//
//  FrameView.swift
//  ModelsLabTest
//
//  Created by Mautisim Munir on 12/06/2022.
//

import SwiftUI

struct FrameView: View {
    var image: CGImage?
    private let label = Text("Camera feed")
    var body: some View {
        // 1
        if let image = image {
            // 2
            GeometryReader { geometry in
                // 3
                Image(image, scale: 1.0, orientation: .upMirrored, label: label)
                    .resizable()
                    // .scaledToFit()
                    .scaledToFill()
                    .frame(
                        width: geometry.size.width,
                        height: geometry.size.height,
                        alignment: .center)
                    .clipped()
            }
        } else {
            // 4
            Color.black
        }
    }
}

struct FrameView_Previews: PreviewProvider {
    static var previews: some View {
        FrameView()
    }
}
@@ -44,6 +44,8 @@ node {

node {
  calculator: "PoseLandmarkGpu"
  input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
  input_stream: "IMAGE:throttled_input_video"
  output_stream: "LANDMARKS:pose_landmarks"
  output_stream: "SEGMENTATION_MASK:segmentation_mask"
@@ -4,13 +4,21 @@ objc_library(

        "*.h",
        "*.mm",
    ]),
    module_name = "MPPoseTracking",
    hdrs = [
        "PoseTracking.h",
        "PoseTrackingOptions.h",
        "PoseTrackingResults.h"
    ],
    copts = [
        "-Wno-shorten-64-to-32",
    ],
    data = [
        "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
        "//mediapipe/modules/pose_detection:pose_detection.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_heavy.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_lite.tflite",
    ],
    sdk_frameworks = ["Accelerate"],
    visibility = ["//visibility:public"],
    deps = [
@@ -6,26 +6,23 @@

#define MEDIAPIPE_POSETRACKING_H
#import <Foundation/Foundation.h>
#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#import "mediapipe/objc/MPPPlayerInputSource.h"
#import "mediapipe/objc/MPPTimestampConverter.h"
#import "PoseTrackingOptions.h"
#import "PoseTrackingResults.h"

@interface PoseTracking : NSObject<MPPGraphDelegate,MPPInputSourceDelegate>
@interface PoseTracking : NSObject<MPPInputSourceDelegate>

// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
// viewWillAppear: and sent video frames on videoQueue.
@property(nonatomic) MPPGraph* mediapipeGraph;
//@property(nonatomic) MPPGraph* mediapipeGraph;

// Helps to convert timestamp.
@property(nonatomic) MPPTimestampConverter* timestampConverter;

// Render frames in a layer.
@property(nonatomic) MPPLayerRenderer* renderer;

@property(nonatomic) CMTime timeStamp;
// Graph name.
@property(nonatomic) NSString* graphName;

@@ -45,8 +42,15 @@

// Codeblock that runs whenever pose tracking results are available
@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);

// Codeblock that runs whenever output is available
@property(nonatomic) void(^graphOutputStreamListener)();

- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions;
- (void)startGraph;
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
- (void)showLandmarks: (BOOL) value;
- (BOOL) areLandmarksShown;
- (void) stopGraph;
@end
@@ -1,118 +1,56 @@

#include "PoseTracking.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPTimestampConverter.h"
#include "mediapipe/framework/packet.h"

static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
static const char* kLandmarksOutputStream = "pose_landmarks";

@implementation PoseTracking

#pragma mark - MediaPipe graph methods

+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
  // Load the graph config resource.
  NSError* configLoadError = nil;
  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
  if (!resource || resource.length == 0) {
    return nil;
  }
  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
  if (!data) {
    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
    return nil;
  }

  // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
  mediapipe::CalculatorGraphConfig config;
  config.ParseFromArray(data.bytes, data.length);

  // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
  return newGraph;
}

- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions{
  self.renderer = [[MPPLayerRenderer alloc] init];
  self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;

  self.timestampConverter = [[MPPTimestampConverter alloc] init];

  dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
      DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
  self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);

  self.poseTrackingOptions = poseTrackingOptions;
  self.graphName = @"pose_tracking_gpu";
  self.mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
  self.graphInputStream = "input_video";

  if (poseTrackingOptions.showLandmarks){
    self.graphOutputStream = "output_video";
  }else{
    self.graphOutputStream = "throttled_input_video";
  }

  [self.mediapipeGraph addFrameOutputStream:self.graphOutputStream
                           outputPacketType:MPPPacketTypePixelBuffer];

#pragma mark - PoseTrackingGraphDelegate Interface
@interface PoseTrackingGraphDelegate : NSObject<MPPGraphDelegate>
// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
@property (nonatomic) MPPGraph* mediapipeGraph;
@property (nonatomic) const char* graphOutputStream;
@property (nonatomic) MPPLayerRenderer* renderer;
@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
@property(nonatomic) void(^graphOutputStreamListener)();

-(id) initWithMediapipeGraph: (MPPGraph*) graph graphOutputStream: (const char*) graphOutputStream
                    renderer: (MPPLayerRenderer*) renderer;
- (void)mediapipeGraph:(MPPGraph*)graph
    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
              fromStream:(const std::string&)streamName ;
- (void)mediapipeGraph:(MPPGraph*)graph
       didOutputPacket:(const ::mediapipe::Packet&)packet
            fromStream:(const std::string&)streamName ;

  [self.mediapipeGraph addFrameOutputStream:"pose_landmarks"
                           outputPacketType:MPPPacketTypeRaw];
@end

  self.mediapipeGraph.delegate = self;
#pragma mark - PoseTrackingGraphDelegate Implementation

  self.poseTrackingResultsListener = ^(PoseTrackingResults*){};
@implementation PoseTrackingGraphDelegate

-(id) initWithMediapipeGraph: (MPPGraph*) graph graphOutputStream: (const char*) graphOutputStream
                    renderer: (MPPLayerRenderer*) renderer
{
  self.mediapipeGraph = graph;
  self.graphOutputStream = graphOutputStream;
  self.renderer = renderer;
  return self;
}

- (void)startGraph {
  // Start running self.mediapipeGraph.
  NSError* error;
  if (![self.mediapipeGraph startWithError:&error]) {
    NSLog(@"Failed to start graph: %@", error);
  }
  else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
    NSLog(@"Failed to complete graph initial run: %@", error);
  }
}

- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
  [cameraSource setDelegate:self queue:self.videoQueue];

  [self startGraph];
  // Start fetching frames from the camera.
  dispatch_async(self.videoQueue, ^{
    [cameraSource start];
  });
}

#pragma mark - MPPInputSourceDelegate methods

// Must be invoked on self.videoQueue.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
                timestamp:(CMTime)timestamp
               fromSource:(MPPInputSource*)source {
  [self.mediapipeGraph sendPixelBuffer:imageBuffer
                            intoStream:self.graphInputStream
                            packetType:MPPPacketTypePixelBuffer
                             timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
}

#pragma mark - MPPGraphDelegate methods

// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
              fromStream:(const std::string&)streamName {
  if (streamName == self.graphOutputStream) {
    self.graphOutputStreamListener();

    // Display the captured image on the screen.
    CVPixelBufferRetain(pixelBuffer);
    dispatch_async(dispatch_get_main_queue(), ^{

@@ -146,4 +84,170 @@ static const char* kLandmarksOutputStream = "pose_landmarks";

  }

}

@end

@interface PoseTracking(){
  // The MediaPipe graph currently in use. Initialized in viewDidLoad, started in
  // viewWillAppear: and sent video frames on videoQueue.
  MPPGraph* mediapipeGraph;
  PoseTrackingGraphDelegate* poseTrackingGraphDelegate;
  //// Helps to convert timestamp.
  MPPTimestampConverter* timestampConverter;
}

@end

@implementation PoseTracking

#pragma mark - MediaPipe graph methods

+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
  // Load the graph config resource.
  NSError* configLoadError = nil;
  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
  if (!resource || resource.length == 0) {
    return nil;
  }
  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
  if (!data) {
    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
    return nil;
  }

  // Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
  mediapipe::CalculatorGraphConfig config;
  config.ParseFromArray(data.bytes, data.length);

  // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
  return newGraph;
}

- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions{
  self.renderer = [[MPPLayerRenderer alloc] init];
  self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;

  self->timestampConverter = [[MPPTimestampConverter alloc] init];

  dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
      DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
  self.videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);

  self.poseTrackingOptions = poseTrackingOptions;
  self.graphName = @"pose_tracking_gpu";
  self->mediapipeGraph = [[self class] loadGraphFromResource: self.graphName];
  self.graphInputStream = "input_video";

  if (poseTrackingOptions.showLandmarks){
    self.graphOutputStream = "output_video";
  }else{
    self.graphOutputStream = "throttled_input_video";
  }

  [self->mediapipeGraph addFrameOutputStream:"output_video"
                            outputPacketType:MPPPacketTypePixelBuffer];
  [self->mediapipeGraph addFrameOutputStream:"throttled_input_video"
                            outputPacketType:MPPPacketTypePixelBuffer];

  self.poseTrackingResultsListener = ^(PoseTrackingResults*){};

  [self->mediapipeGraph addFrameOutputStream:"pose_landmarks"
                            outputPacketType:MPPPacketTypeRaw];
  self->poseTrackingGraphDelegate = [[PoseTrackingGraphDelegate alloc] initWithMediapipeGraph:self->mediapipeGraph graphOutputStream:self.graphOutputStream renderer:self.renderer];
  // To prevent ARC from causing an accidental memory leak in the next block
  __weak PoseTracking* weakSelf = self;
  self->poseTrackingGraphDelegate.poseTrackingResultsListener = ^(PoseTrackingResults* results){
    weakSelf.poseTrackingResultsListener(results);
  };

  self->poseTrackingGraphDelegate.graphOutputStreamListener = ^(){
    if (weakSelf.graphOutputStream != nil)
      weakSelf.graphOutputStreamListener();
  };

  self->mediapipeGraph.delegate = self->poseTrackingGraphDelegate;

  return self;
}

- (void)showLandmarks: (BOOL) value{
  if (value){
    self->poseTrackingGraphDelegate.graphOutputStream = "output_video";
  }else{
    self->poseTrackingGraphDelegate.graphOutputStream = "throttled_input_video";
  }
}

- (BOOL) areLandmarksShown{
  return self->poseTrackingGraphDelegate.graphOutputStream == "output_video";
}

- (void)startGraph {
  // Start running self.mediapipeGraph.
  [self->mediapipeGraph setSidePacket:mediapipe::MakePacket<int>(self.poseTrackingOptions.modelComplexity) named:"model_complexity"];
  NSError* error;
  if (![self->mediapipeGraph startWithError:&error]) {
    NSLog(@"Failed to start graph: %@", error);
  }
  else if (![self->mediapipeGraph waitUntilIdleWithError:&error]) {
    NSLog(@"Failed to complete graph initial run: %@", error);
  }
}

- (void) stopGraph {
  [self->mediapipeGraph cancel];
  NSError* error;
  if ([self->mediapipeGraph closeAllInputStreamsWithError: &error]){
    if (![self->mediapipeGraph waitUntilDoneWithError:&error]){
      NSLog(@"Failed to stop graph: %@", error);
    }
  }else {
    NSLog(@"Failed to close input streams: %@", error);
  }
}

- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
  [cameraSource setDelegate:self queue:self.videoQueue];

  [self startGraph];
  // Start fetching frames from the camera.
  dispatch_async(self.videoQueue, ^{
    [cameraSource start];
  });
}

#pragma mark - MPPInputSourceDelegate methods

// Must be invoked on self.videoQueue.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
                timestamp:(CMTime)timestamp
               fromSource:(MPPInputSource*)source {
  self.timeStamp = timestamp;

  [self->mediapipeGraph sendPixelBuffer:imageBuffer
                             intoStream:self.graphInputStream
                             packetType:MPPPacketTypePixelBuffer
                              timestamp:[self->timestampConverter timestampForMediaTime:timestamp]];
}

#pragma mark - MPPGraphDelegate methods

@end
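For orientation (not part of the diff): the model_complexity side packet that startGraph sets above is what the Swift demo drives through the Lindera wrapper. A minimal usage sketch, assuming the method names shown in ViewController.swift earlier in this commit (setModelComplexityNow, getModelComplexity, showLandmarks, startCamera):

let lindera = Lindera()
// 0 = lite, 1 = full, 2 = heavy, matching the three pose_landmark .tflite files bundled in the BUILD file above.
lindera.setModelComplexityNow(complexity: 1)
lindera.showLandmarks(value: true)
lindera.startCamera()
print("current complexity: \(lindera.getModelComplexity())")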
@@ -5,12 +5,16 @@

#ifndef MEDIAPIPE_POSETRACKINGOPTIONS_H
#define MEDIAPIPE_POSETRACKINGOPTIONS_H
#import <Foundation/Foundation.h>
@interface PoseTrackingOptions: NSObject

@interface PoseTrackingOptions: NSObject
@property(nonatomic) int modelComplexity;
@property(nonatomic) bool showLandmarks;
//@property(nonatomic) int cameraRotation;

- (instancetype) initWithShowLandmarks : (bool) showLandmarks;

- (instancetype) initWithShowLandmarks : (bool) showLandmarks modelComplexity: (int) modelComplexity;

@end
@ -2,9 +2,10 @@

@implementation PoseTrackingOptions

- (instancetype) initWithShowLandmarks: (bool) showLandmarks modelComplexity: (int) modelComplexity {
    // self.cameraRotation = cameraRotation;
    self.showLandmarks = showLandmarks;
    self.modelComplexity = modelComplexity;
    return self;
}

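Not part of this commit: from Swift the updated initializer imports as shown below; the complexity values follow the 0 = lite, 1 = full, 2 = heavy convention noted in Lindera.swift.

// Sketch only.
let options = PoseTrackingOptions(showLandmarks: true, modelComplexity: 1)
options.showLandmarks = false   // toggle landmark rendering
options.modelComplexity = 2     // 0 = lite, 1 = full, 2 = heavy
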
@ -2,6 +2,44 @@
#define MEDIAPIPE_POSETRACKINGRESULTS_H

#import <Foundation/Foundation.h>

static const NSInteger POSE_NOSE = 0;
static const NSInteger POSE_LEFT_EYE_INNER = 1;
static const NSInteger POSE_LEFT_EYE = 2;
static const NSInteger POSE_LEFT_EYE_OUTER = 3;
static const NSInteger POSE_RIGHT_EYE_INNER = 4;
static const NSInteger POSE_RIGHT_EYE = 5;
static const NSInteger POSE_RIGHT_EYE_OUTER = 6;
static const NSInteger POSE_LEFT_EAR = 7;
static const NSInteger POSE_RIGHT_EAR = 8;
static const NSInteger POSE_MOUTH_LEFT = 9;
static const NSInteger POSE_MOUTH_RIGHT = 10;
static const NSInteger POSE_LEFT_SHOULDER = 11;
static const NSInteger POSE_RIGHT_SHOULDER = 12;
static const NSInteger POSE_LEFT_ELBOW = 13;
static const NSInteger POSE_RIGHT_ELBOW = 14;
static const NSInteger POSE_LEFT_WRIST = 15;
static const NSInteger POSE_RIGHT_WRIST = 16;
static const NSInteger POSE_LEFT_PINKY = 17;
static const NSInteger POSE_RIGHT_PINKY = 18;
static const NSInteger POSE_LEFT_INDEX = 19;
static const NSInteger POSE_RIGHT_INDEX = 20;
static const NSInteger POSE_LEFT_THUMB = 21;
static const NSInteger POSE_RIGHT_THUMB = 22;
static const NSInteger POSE_LEFT_HIP = 23;
static const NSInteger POSE_RIGHT_HIP = 24;
static const NSInteger POSE_LEFT_KNEE = 25;
static const NSInteger POSE_RIGHT_KNEE = 26;
static const NSInteger POSE_LEFT_ANKLE = 27;
static const NSInteger POSE_RIGHT_ANKLE = 28;
static const NSInteger POSE_LEFT_HEEL = 29;
static const NSInteger POSE_RIGHT_HEEL = 30;
static const NSInteger POSE_LEFT_FOOT = 31;
static const NSInteger POSE_RIGHT_FOOT = 32;


@interface PoseLandmark: NSObject

@property float x;

@ -7,6 +7,8 @@
    self.x = x;
    self.y = y;
    self.z = z;
    self.presence = presence;
    self.visibility = visibility;
    return self;
}

224
mediapipe/swift/solutions/lindera/Asensei3D.swift
Normal file

@ -0,0 +1,224 @@
import Foundation

public struct Asensei3DPose {

    public let nose: BodyJointDetails?

    public let leftEyeInner: BodyJointDetails?
    public let leftEye: BodyJointDetails?
    public let leftEyeOuter: BodyJointDetails?

    public let rightEyeInner: BodyJointDetails?
    public let rightEye: BodyJointDetails?
    public let rightEyeOuter: BodyJointDetails?

    public let leftEar: BodyJointDetails?
    public let rightEar: BodyJointDetails?

    public let mouthLeft: BodyJointDetails?
    public let mouthRight: BodyJointDetails?

    public let leftShoulder: BodyJointDetails?
    public let rightShoulder: BodyJointDetails?

    public let leftElbow: BodyJointDetails?
    public let rightElbow: BodyJointDetails?

    public let leftWrist: BodyJointDetails?
    public let rightWrist: BodyJointDetails?

    public let leftPinky: BodyJointDetails?
    public let rightPinky: BodyJointDetails?

    public let leftIndex: BodyJointDetails?
    public let rightIndex: BodyJointDetails?

    public let leftThumb: BodyJointDetails?
    public let rightThumb: BodyJointDetails?

    public let leftHip: BodyJointDetails?
    public let rightHip: BodyJointDetails?

    public let leftKnee: BodyJointDetails?
    public let rightKnee: BodyJointDetails?

    public let rightAnkle: BodyJointDetails?
    public let leftAnkle: BodyJointDetails?

    public let rightHeel: BodyJointDetails?
    public let leftHeel: BodyJointDetails?

    public let rightFoot: BodyJointDetails?
    public let leftFoot: BodyJointDetails?
}

extension Asensei3DPose: Encodable {

    private enum CodingKeys: String, CodingKey {
        case nose

        case leftEyeInner
        case leftEye
        case leftEyeOuter

        case rightEyeInner
        case rightEye
        case rightEyeOuter

        case leftEar
        case rightEar

        case mouthLeft
        case mouthRight

        case leftShoulder
        case rightShoulder

        case leftElbow
        case rightElbow

        case leftWrist
        case rightWrist

        case leftPinky
        case rightPinky

        case leftIndex
        case rightIndex

        case leftThumb
        case rightThumb

        case leftHip
        case rightHip

        case leftKnee
        case rightKnee

        case rightAnkle
        case leftAnkle

        case rightHeel
        case leftHeel

        case rightFoot
        case leftFoot
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)

        try container.encodeIfPresent(self.nose, forKey: .nose)

        try container.encodeIfPresent(self.leftEyeInner, forKey: .leftEyeInner)
        try container.encodeIfPresent(self.leftEye, forKey: .leftEye)
        try container.encodeIfPresent(self.leftEyeOuter, forKey: .leftEyeOuter)

        try container.encodeIfPresent(self.rightEyeInner, forKey: .rightEyeInner)
        try container.encodeIfPresent(self.rightEye, forKey: .rightEye)
        try container.encodeIfPresent(self.rightEyeOuter, forKey: .rightEyeOuter)

        try container.encodeIfPresent(self.leftEar, forKey: .leftEar)
        try container.encodeIfPresent(self.rightEar, forKey: .rightEar)

        try container.encodeIfPresent(self.mouthLeft, forKey: .mouthLeft)
        try container.encodeIfPresent(self.mouthRight, forKey: .mouthRight)

        try container.encodeIfPresent(self.leftShoulder, forKey: .leftShoulder)
        try container.encodeIfPresent(self.rightShoulder, forKey: .rightShoulder)

        try container.encodeIfPresent(self.leftElbow, forKey: .leftElbow)
        try container.encodeIfPresent(self.rightElbow, forKey: .rightElbow)

        try container.encodeIfPresent(self.leftWrist, forKey: .leftWrist)
        try container.encodeIfPresent(self.rightWrist, forKey: .rightWrist)

        try container.encodeIfPresent(self.leftPinky, forKey: .leftPinky)
        try container.encodeIfPresent(self.rightPinky, forKey: .rightPinky)

        try container.encodeIfPresent(self.leftIndex, forKey: .leftIndex)
        try container.encodeIfPresent(self.rightIndex, forKey: .rightIndex)

        try container.encodeIfPresent(self.leftThumb, forKey: .leftThumb)
        try container.encodeIfPresent(self.rightThumb, forKey: .rightThumb)

        try container.encodeIfPresent(self.leftHip, forKey: .leftHip)
        try container.encodeIfPresent(self.rightHip, forKey: .rightHip)

        try container.encodeIfPresent(self.leftKnee, forKey: .leftKnee)
        try container.encodeIfPresent(self.rightKnee, forKey: .rightKnee)

        try container.encodeIfPresent(self.rightAnkle, forKey: .rightAnkle)
        try container.encodeIfPresent(self.leftAnkle, forKey: .leftAnkle)

        try container.encodeIfPresent(self.rightHeel, forKey: .rightHeel)
        try container.encodeIfPresent(self.leftHeel, forKey: .leftHeel)

        try container.encodeIfPresent(self.rightFoot, forKey: .rightFoot)
        try container.encodeIfPresent(self.leftFoot, forKey: .leftFoot)
    }
}

extension Asensei3DPose {

    public struct BodyJointDetails: Encodable {

        public let position: Vector3D
        public let confidence: Float

        private enum CodingKeys: String, CodingKey {
            case x
            case y
            case z
            case c
        }

        public func encode(to encoder: Encoder) throws {
            var container = encoder.container(keyedBy: CodingKeys.self)
            try container.encode(self.position.x, forKey: .x)
            try container.encode(self.position.y, forKey: .y)
            try container.encode(self.position.z, forKey: .z)
            try container.encode(self.confidence, forKey: .c)
        }
    }
}

extension Asensei3DPose {

    public struct Vector3D {
        public let x: Float
        public let y: Float
        public let z: Float

        public init(x: Float, y: Float, z: Float) {
            self.x = x
            self.y = y
            self.z = z
        }
    }
}

extension Asensei3DPose {

    public struct Event: Encodable {
        public let pose: Asensei3DPose
        let timestamp: TimeInterval

        private enum CodingKeys: String, CodingKey {
            case bodyJoints
            case timestamp
        }

        public func encode(to encoder: Encoder) throws {
            var container = encoder.container(keyedBy: CodingKeys.self)
            try container.encode(self.pose, forKey: .bodyJoints)
            try container.encode(self.timestamp * 1000, forKey: .timestamp)
        }
    }
}

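Not part of this commit: a small sketch of what the custom CodingKeys above produce. The joint values are placeholders, and the snippet assumes it runs inside the LinderaDetection module, since BodyJointDetails only has the internal memberwise initializer.

// Sketch only; placeholder values, force-try for brevity.
import Foundation

let joint = Asensei3DPose.BodyJointDetails(
    position: .init(x: 0.5, y: 0.25, z: 0.125),
    confidence: 1)

let data = try! JSONEncoder().encode(joint)
print(String(data: data, encoding: .utf8)!)
// prints something like {"x":0.5,"y":0.25,"z":0.125,"c":1}
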
26
mediapipe/swift/solutions/lindera/BUILD
Normal file

@ -0,0 +1,26 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
    name = "lindera",
    srcs = [
        "Lindera.swift",
        "Asensei3D.swift",
        "utils.swift",
    ],
    linkopts = [
        "-lc++",
        "-std=c++17",
        "-lstdc++",
    ],
    module_name = "LinderaDetection",
    visibility = ["//visibility:public"],
    deps = [
        "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
        "//mediapipe/objc:mediapipe_framework_ios",
        "//mediapipe/objc:mediapipe_input_sources_ios",
        "//mediapipe/objc:mediapipe_layer_renderer",
    ] + select({
        "//mediapipe:ios_i386": [],
        "//mediapipe:ios_x86_64": [],
        "//conditions:default": [
            "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
            "//mediapipe/framework/formats:landmark_cc_proto",
        ],
    }),
)

267
mediapipe/swift/solutions/lindera/Lindera.swift
Normal file

@ -0,0 +1,267 @@
// This is the copperlabs posetracking api built in objective c
import AVFoundation  // needed for the AVCaptureDevice/AVCaptureSession types used below
import MPPoseTracking
import UIKit


/// A helper class to run the Pose Tracking API
/// TFLite models are also loaded when you initialize this class
public final class Lindera {

    // MARK: - Public Class API

    // A delegate to handle results
    public weak var delegate: LinderaDelegate?

    /// This function sets up your callback function to happen whenever there is an fps update
    public func setFpsDelegate(fpsDelegate: @escaping (_ fps: Double) -> Void) {
        fpsHelper.onFpsUpdate = fpsDelegate
    }

    // Get the camera UIView that may contain landmarks drawing
    public var cameraView: UIView {
        return self.linderaExerciseSession
    }

    // Show landmarks - works instantaneously!
    public func showLandmarks(value: Bool) {
        self.poseTracking.showLandmarks(value)
    }

    // Are landmarks already drawn?
    public func areLandmarksShown() -> Bool {
        return self.poseTracking.areLandmarksShown()
    }

    // Current model complexity: 0 -> lite; 1 -> full; 2 -> heavy
    public func getModelComplexity() -> Int {
        return Int(self.poseTracking.poseTrackingOptions.modelComplexity)
    }

    // Set the model complexity and restart detection to load new models
    public func setModelComplexityNow(complexity: Int) {
        let poseTrackingOptions = poseTracking.poseTrackingOptions

        poseTrackingOptions?.modelComplexity = Int32(complexity)

        poseTracking = PoseTracking(poseTrackingOptions: poseTrackingOptions)
        startPoseTracking()
        startCamera()
    }

    public required init() {
        startPoseTracking()
    }

    public func startCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
        // Set our rendering layer frame according to the cameraView boundary.
        self.poseTracking.renderer.layer.frame = cameraView.layer.bounds
        // Attach the render CALayer to cameraView to render output to.
        self.cameraView.layer.addSublayer(self.poseTracking.renderer.layer)

        self.cameraSource.requestCameraAccess(
            completionHandler: { (granted: Bool) -> Void in
                if granted {
                    self.poseTracking.videoQueue.async(execute: { [weak self] in
                        self?.cameraSource.start()
                    })
                    completion?(.success(Void()))
                } else {
                    // Report an error instead of trapping (the original passed
                    // preconditionFailure(...), which would crash before calling completion).
                    completion?(.failure(NSError(domain: "Lindera",
                                                 code: 1,
                                                 userInfo: [NSLocalizedDescriptionKey: "Camera Access Not Granted"])))
                }
            })
    }

    /// Choose front or back camera. Must restart camera after use if already started
    public func selectCamera(_ position: AVCaptureDevice.Position, _ completion: ((Result<Void, Error>) -> Void)? = nil) {
        self.poseTracking.videoQueue.async { [weak self] in
            self?.cameraSource.cameraPosition = position
            completion?(.success(Void()))
        }
    }

    // MARK: - Private Class Functions

    // Set your custom view here
    private lazy var linderaExerciseSession: UIView = {
        // This will be the main camera view; change it to a custom view class to get desired results.
        let liveView = UIView()
        return liveView
    }()

    private func startPoseTracking() {
        // Set camera preferences.
        self.cameraSource.sessionPreset = AVCaptureSession.Preset.high.rawValue
        self.cameraSource.cameraPosition = AVCaptureDevice.Position.front
        self.cameraSource.orientation = AVCaptureVideoOrientation.portrait
        if self.cameraSource.orientation == AVCaptureVideoOrientation.portrait {
            self.cameraSource.videoMirrored = true
        }
        // Call LinderaDelegate on pose tracking results.
        self.poseTracking.poseTrackingResultsListener = { [weak self] results in
            guard let self = self, let results = results else {
                return
            }
            self.delegate?.lindera(self, didDetect: .init(pose: Asensei3DPose.init(results), timestamp: CMTimeGetSeconds(self.poseTracking.timeStamp)))
        }
        self.poseTracking.graphOutputStreamListener = { [weak self] in
            self?.fpsHelper.logTime()
        }

        self.poseTracking.startGraph()
        // Attach the camera's output to the poseTracking object and its videoQueue.
        self.cameraSource.setDelegate(self.poseTracking, queue: self.poseTracking.videoQueue)
    }

    func stopCamera() {
        if self.cameraSource.isRunning {
            self.poseTracking.videoQueue.async { [weak self] in
                self?.cameraSource.stop()
            }
        }
    }

    /// Switches camera from front to back and vice versa
    func switchCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
        self.poseTracking.videoQueue.async { [weak self] in
            if let self = self {

                self.stopCamera()
                self.startCamera(completion)

                switch self.cameraSource.cameraPosition {
                case .unspecified:
                    completion?(.failure(NSError(domain: "Lindera",
                                                 code: 2,
                                                 userInfo: [NSLocalizedDescriptionKey: "Unknown Camera Position"])))
                case .back:
                    self.selectCamera(AVCaptureDevice.Position.front, completion)
                case .front:
                    self.selectCamera(AVCaptureDevice.Position.back, completion)
                @unknown default:
                    completion?(.failure(NSError(domain: "Lindera",
                                                 code: 2,
                                                 userInfo: [NSLocalizedDescriptionKey: "Unknown Camera Position"])))
                }
            }
        }
    }

    // MARK: - Private Class Objects

    // Initialize the PoseTracking api and load models.
    var poseTracking: PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true, modelComplexity: 1))

    // Needed to get fps of model.
    let fpsHelper = FPSHelper(smoothingFactor: 0.95)

    // Attach the Mediapipe camera helper to our class.
    let cameraSource = MPPCameraInputSource()

}


public protocol LinderaDelegate: AnyObject {

    func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event)
}


/// Convert PoseLandmarks from PoseTrackingAPI to BodyJointDetails
func landmarkToBodyJointDetails(landmark: PoseLandmark) -> Asensei3DPose.BodyJointDetails {
    return Asensei3DPose.BodyJointDetails(position: .init(x: landmark.x, y: landmark.y, z: landmark.z), confidence: landmark.visibility)
}

// MARK: - Helpers
extension Asensei3DPose {

    init(_ pose: PoseTrackingResults) {

        self.nose = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_NOSE])

        self.leftEyeInner = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE_INNER])
        self.leftEye = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE])
        self.leftEyeOuter = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE_OUTER])

        // Fixed: this previously read POSE_RIGHT_EYE_OUTER for the inner landmark.
        self.rightEyeInner = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE_INNER])
        self.rightEye = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE])
        self.rightEyeOuter = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE_OUTER])

        self.leftEar = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EAR])
        self.rightEar = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EAR])

        self.mouthLeft = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_MOUTH_LEFT])
        self.mouthRight = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_MOUTH_RIGHT])

        self.leftShoulder = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_SHOULDER])
        self.rightShoulder = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_SHOULDER])

        self.leftElbow = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_ELBOW])
        self.rightElbow = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_ELBOW])

        self.leftWrist = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_WRIST])
        self.rightWrist = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_WRIST])

        self.leftPinky = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_PINKY])
        self.rightPinky = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_PINKY])

        self.leftIndex = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_INDEX])
        self.rightIndex = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_INDEX])

        self.leftThumb = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_THUMB])
        self.rightThumb = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_THUMB])

        self.leftHip = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_HIP])
        self.rightHip = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_HIP])

        self.leftKnee = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_KNEE])
        self.rightKnee = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_KNEE])

        self.rightAnkle = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_ANKLE])
        self.leftAnkle = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_ANKLE])

        self.rightHeel = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_HEEL])
        self.leftHeel = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_HEEL])

        self.rightFoot = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_FOOT])
        self.leftFoot = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_FOOT])

    }
}

//extension Asensei3DPose.Vector3D {
//
//    init(_ vector: Lindera3DVector) {
//        self.x = -vector.x
//        self.y = vector.z
//        self.z = vector.y
//    }
//}

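Not part of this commit: a sketch of consuming the Lindera API from an app. LinderaViewController and its delegate conformance are illustrative names; everything else (Lindera, LinderaDelegate, cameraView, startCamera, showLandmarks, setFpsDelegate, Asensei3DPose.Event) comes from this file and the BUILD target above (module_name = "LinderaDetection").

// Sketch only.
import UIKit
import LinderaDetection

final class LinderaViewController: UIViewController, LinderaDelegate {

    private let lindera = Lindera()

    override func viewDidLoad() {
        super.viewDidLoad()
        lindera.delegate = self
        lindera.setFpsDelegate { fps in print("fps: \(fps)") }

        // Embed the camera/render view and start the session.
        lindera.cameraView.frame = view.bounds
        view.addSubview(lindera.cameraView)
        lindera.showLandmarks(value: true)
        lindera.startCamera()
    }

    func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
        if let nose = event.pose.nose {
            print("nose at \(nose.position.x), \(nose.position.y), \(nose.position.z)")
        }
    }
}
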
45
mediapipe/swift/solutions/lindera/utils.swift
Normal file

@ -0,0 +1,45 @@
//
//  utils.swift
//  Mediapipe
//
//  Created by Mautisim Munir on 21/10/2022.
//

import Foundation


public class FPSHelper {
    var smoothingFactor = 0.8
    var _fps: Double? = nil
    var time: CFAbsoluteTime? = nil
    public var onFpsUpdate: ((_ fps: Double) -> Void)? = nil

    init(smoothingFactor: Double) {
        self.smoothingFactor = smoothingFactor
    }

    public func logTime() {
        let currTime = CFAbsoluteTimeGetCurrent()
        if time != nil {
            let elapsedTime = currTime - time!
            let fps = 1 / Double(elapsedTime)
            if _fps == nil {
                _fps = fps
            } else {
                // Exponential moving average: keep `smoothingFactor` of the previous estimate.
                _fps = (1 - smoothingFactor) * fps + smoothingFactor * _fps!
            }
            if onFpsUpdate != nil {
                onFpsUpdate?(_fps!)
            }
        }
        time = currTime
    }
}

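Not part of this commit: FPSHelper in isolation — call logTime() once per processed frame and receive an exponentially smoothed frames-per-second estimate. The initializer is module-internal, so this sketch assumes it runs inside the LinderaDetection target, as Lindera.swift does.

// Sketch only.
let fpsHelper = FPSHelper(smoothingFactor: 0.95)
fpsHelper.onFpsUpdate = { fps in
    print(String(format: "%.1f fps", fps))
}

// Called from a per-frame callback such as graphOutputStreamListener:
fpsHelper.logTime()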