PoseTracking solution API working in Swift

Mautisim Munir 2022-10-17 12:40:59 +05:00
parent c8e64f8720
commit 09f98fbad0
29 changed files with 1001 additions and 3 deletions

View File

@@ -23,8 +23,7 @@
     "mediapipe/objc/testing/app/BUILD"
   ],
   "buildTargets" : [
-    "//mediapipe/examples/ios/posetrackingsolution-swift:app_lib",
-    "//mediapipe/examples/ios/posetrackingsolution-swift:posetracking-solution-swift",
+    "//mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera:posetracking-lindera",
     "//mediapipe/examples/ios/posetrackingsolutiongpu:PoseTrackingSolutionGpuApp",
     "//mediapipe/swift/solutions/lindera:Lindera"
   ],
@@ -93,6 +92,9 @@
     "mediapipe/examples/ios/iristrackinggpu",
     "mediapipe/examples/ios/objectdetectioncpu",
     "mediapipe/examples/ios/objectdetectiongpu",
+    "mediapipe/examples/ios/posetracking-lindera",
+    "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera",
+    "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj",
     "mediapipe/examples/ios/posetrackinggpu",
     "mediapipe/examples/ios/posetrackingsolution-swift",
     "mediapipe/examples/ios/posetrackingsolution-swift/Camera",

View File

@@ -24,8 +24,8 @@
     "mediapipe/examples/ios/objectdetectioncpu",
     "mediapipe/examples/ios/objectdetectiongpu",
     "mediapipe/examples/ios/objectdetectiontrackinggpu",
+    "mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera",
     "mediapipe/examples/ios/posetrackinggpu",
-    "mediapipe/examples/ios/posetrackingsolution-swift",
     "mediapipe/examples/ios/posetrackingsolutiongpu",
     "mediapipe/examples/ios/selfiesegmentationgpu",
     "mediapipe/objc",

View File

@@ -0,0 +1,22 @@
//
//  AppDelegate.swift
//  PoseTrackingLindera
//
//  Created by Mautisim Munir on 17/10/2022.
//

import UIKit

@main
class AppDelegate: UIResponder, UIApplicationDelegate {

    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }
}

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,93 @@
{
"images" : [
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "60x60"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "60x60"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "83.5x83.5"
},
{
"idiom" : "ios-marketing",
"scale" : "1x",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,78 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application")
load("@build_bazel_rules_apple//apple:resources.bzl", "apple_resource_bundle")
load(
    "//mediapipe/examples/ios:bundle_id.bzl",
    "BUNDLE_ID_PREFIX",
    "example_provisioning",
)

MIN_IOS_VERSION = "14.0"

swift_library(
    name = "lindera_app_lib",
    srcs = glob(["**/*.swift"]),
    data = [
        "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
        "//mediapipe/modules/pose_detection:pose_detection.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
    ] + [
        "Base.lproj/LaunchScreen.storyboard",
        "Base.lproj/Main.storyboard",
    ],
    linkopts = [
        "-lc++",
        "-std=c++17",
        "-lstdc++",
    ],
    module_name = "lindera_app_lib",
    visibility = ["//visibility:public"],
    deps = [
        "@ios_opencv//:OpencvFramework",
    ] + [
        "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
        "//mediapipe/objc:mediapipe_framework_ios",
        "//mediapipe/objc:mediapipe_input_sources_ios",
        "//mediapipe/objc:mediapipe_layer_renderer",
    ] + select({
        "//mediapipe:ios_i386": [],
        "//mediapipe:ios_x86_64": [],
        "//conditions:default": [
            "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
            "//mediapipe/framework/formats:landmark_cc_proto",
        ],
    }),
)

apple_resource_bundle(
    name = "app_resources",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    resources = [
        "Assets.xcassets",
    ],
    visibility = ["//visibility:public"],
)

ios_application(
    name = "posetracking-lindera",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    families = [
        "iphone",
        "ipad",
    ],
    infoplists = [
        "Info.plist",
        "//mediapipe/examples/ios/common:Info.plist",
    ],
    linkopts = [
        "-lc++",
    ],
    minimum_os_version = MIN_IOS_VERSION,
    provisioning_profile = example_provisioning(),
    resources = [":app_resources"],
    visibility = ["//visibility:public"],
    deps = [
        ":lindera_app_lib",
        ":app_resources",
        # "@ios_opencv//:OpencvFramework",
    ],
)

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
<resources>
<systemColor name="systemBackgroundColor">
<color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</systemColor>
</resources>
</document>

View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina6_1" orientation="portrait" appearance="light"/>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
</view>
<connections>
<outlet property="liveView" destination="8bC-Xf-vdC" id="COw-5j-lAL"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="30" y="43"/>
</scene>
</scenes>
<resources>
<systemColor name="systemBackgroundColor">
<color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</systemColor>
</resources>
</document>

View File

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>NSCameraUsageDescription</key>
    <string>This app uses the camera to demonstrate live video processing.</string>
    <key>CFBundleDevelopmentRegion</key>
    <string>en</string>
    <key>CFBundleExecutable</key>
    <string>$(EXECUTABLE_NAME)</string>
    <key>CFBundleIdentifier</key>
    <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
    <key>CFBundleInfoDictionaryVersion</key>
    <string>6.0</string>
    <key>CFBundleName</key>
    <string>$(PRODUCT_NAME)</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>CFBundleShortVersionString</key>
    <string>1.0</string>
    <key>CFBundleVersion</key>
    <string>1</string>
    <key>LSRequiresIPhoneOS</key>
    <true/>
    <key>UILaunchStoryboardName</key>
    <string>LaunchScreen</string>
    <key>UIMainStoryboardFile</key>
    <string>Main</string>
</dict>
</plist>

View File

@@ -0,0 +1,40 @@
//
//  ViewController.swift
//  PoseTrackingLindera
//
//  Created by Mautisim Munir on 17/10/2022.
//

import UIKit
import AVFoundation
import MPPoseTracking

class ViewController: UIViewController {

    let poseTracking: PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true))
    let cameraSource = MPPCameraInputSource()

    @IBOutlet var liveView: UIView?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Render the pose-tracking overlay on top of the live camera view.
        guard let liveView = self.liveView else { return }
        poseTracking.renderer.layer.frame = liveView.layer.bounds
        liveView.layer.addSublayer(poseTracking.renderer.layer)

        // Configure the front camera in portrait, mirrored so the preview reads like a mirror.
        cameraSource.sessionPreset = AVCaptureSession.Preset.high.rawValue
        cameraSource.cameraPosition = AVCaptureDevice.Position.front
        cameraSource.orientation = AVCaptureVideoOrientation.portrait
        if cameraSource.orientation == AVCaptureVideoOrientation.portrait {
            cameraSource.videoMirrored = true
        }

        // Start the pose-tracking graph once camera access is granted.
        cameraSource.requestCameraAccess(completionHandler: { (granted: Bool) -> Void in
            if granted {
                self.poseTracking.start(withCamera: self.cameraSource)
            }
        })
    }
}
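Note: when access is not granted, the view controller above leaves the live view blank with no feedback. A minimal sketch of a fallback alert using only UIKit; the helper name and alert copy are illustrative assumptions, not part of this commit:

extension ViewController {
    // Hypothetical helper: surface a message when camera access is denied.
    func showCameraDeniedAlert() {
        let alert = UIAlertController(
            title: "Camera Access Needed",
            message: "Enable camera access in Settings to see pose tracking.",
            preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default))
        present(alert, animated: true)
    }
}

Calling this from the requestCameraAccess completion handler (dispatched to the main queue) when granted is false would cover the denied path.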

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,98 @@
{
"images" : [
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "60x60"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "60x60"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "83.5x83.5"
},
{
"idiom" : "ios-marketing",
"scale" : "1x",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,21 @@
{
"images" : [
{
"filename" : "man-selfie.jpg",
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"scale" : "2x"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown. (new image, 2.1 MiB)

View File

@@ -0,0 +1,21 @@
{
"images" : [
{
"filename" : "test-people.jpg",
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"scale" : "2x"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown. (new image, 277 KiB)

View File

@@ -0,0 +1,73 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application")
load("@build_bazel_rules_apple//apple:resources.bzl", "apple_resource_bundle")
load(
    "//mediapipe/examples/ios:bundle_id.bzl",
    "BUNDLE_ID_PREFIX",
    "example_provisioning",
)

MIN_IOS_VERSION = "14.0"

swift_library(
    name = "app_lib",
    srcs = glob(["**/*.swift"]),
    data = [
        "//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
        "//mediapipe/modules/pose_detection:pose_detection.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
    ],
    linkopts = [
        "-lc++",
        "-std=c++17",
        "-lstdc++",
    ],
    module_name = "app_lib",
    visibility = ["//visibility:public"],
    deps = [
        "@ios_opencv//:OpencvFramework",
    ] + [
        "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
        "//mediapipe/objc:mediapipe_framework_ios",
        "//mediapipe/objc:mediapipe_input_sources_ios",
        "//mediapipe/objc:mediapipe_layer_renderer",
    ] + select({
        "//mediapipe:ios_i386": [],
        "//mediapipe:ios_x86_64": [],
        "//conditions:default": [
            "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
            "//mediapipe/framework/formats:landmark_cc_proto",
        ],
    }),
)

apple_resource_bundle(
    name = "app_resources",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    resources = [
        "Assets.xcassets",
    ],
    visibility = ["//visibility:public"],
)

ios_application(
    name = "posetracking-solution-swift",
    bundle_id = BUNDLE_ID_PREFIX + ".PoseTrackingGpu",
    families = [
        "iphone",
        "ipad",
    ],
    infoplists = ["Info.plist"],
    linkopts = [
        "-lc++",
    ],
    minimum_os_version = MIN_IOS_VERSION,
    provisioning_profile = example_provisioning(),
    resources = [":app_resources"],
    visibility = ["//visibility:public"],
    deps = [
        ":app_lib",
        ":app_resources",
        # "@ios_opencv//:OpencvFramework",
    ],
)

View File

@@ -0,0 +1,32 @@
import Foundation

enum CameraError: Error {
    case cameraUnavailable
    case cannotAddInput
    case cannotAddOutput
    case createCaptureInput(Error)
    case deniedAuthorization
    case restrictedAuthorization
    case unknownAuthorization
}

extension CameraError: LocalizedError {
    var errorDescription: String? {
        switch self {
        case .cameraUnavailable:
            return "Camera unavailable"
        case .cannotAddInput:
            return "Cannot add capture input to session"
        case .cannotAddOutput:
            return "Cannot add video output to session"
        case .createCaptureInput(let error):
            return "Creating capture input for camera: \(error.localizedDescription)"
        case .deniedAuthorization:
            return "Camera access denied"
        case .restrictedAuthorization:
            return "Attempting to access a restricted capture device"
        case .unknownAuthorization:
            return "Unknown authorization status for capture device"
        }
    }
}
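Because CameraError conforms to LocalizedError, UI code can read errorDescription directly. A minimal sketch of surfacing the published error in SwiftUI; the banner view itself is an assumption, not part of this commit:

import SwiftUI

// Hypothetical banner that displays the latest camera error, if any.
struct CameraErrorBanner: View {
    let error: CameraError?

    var body: some View {
        // errorDescription comes from the LocalizedError conformance above.
        if let description = error?.errorDescription {
            Text(description)
                .foregroundColor(.white)
                .padding()
                .background(Color.red)
        }
    }
}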

View File

@@ -0,0 +1,142 @@
import AVFoundation

/// Owns the AVCaptureSession and publishes any configuration errors.
class CameraManager: ObservableObject {
    /// Last error encountered while authorizing or configuring the camera.
    @Published var error: CameraError?

    /// The capture session that camera input and video output attach to.
    let session = AVCaptureSession()

    /// Serial queue so session configuration never blocks the main thread.
    private let sessionQueue = DispatchQueue(label: "com.raywenderlich.SessionQ")

    /// Output that delivers raw frames to a sample-buffer delegate.
    private let videoOutput = AVCaptureVideoDataOutput()

    private var status = Status.unconfigured

    enum Status {
        case unconfigured
        case configured
        case unauthorized
        case failed
    }

    /// Singleton; configuration happens once at first access.
    static let shared = CameraManager()

    private init() {
        configure()
    }

    private func configure() {
        checkPermissions()
        sessionQueue.async {
            self.configureCaptureSession()
            self.session.startRunning()
        }
    }

    /// Registers the delegate that receives camera frames.
    func set(
        _ delegate: AVCaptureVideoDataOutputSampleBufferDelegate,
        queue: DispatchQueue
    ) {
        sessionQueue.async {
            self.videoOutput.setSampleBufferDelegate(delegate, queue: queue)
        }
    }

    /// Publishes errors on the main queue, since `error` drives UI.
    private func set(error: CameraError?) {
        DispatchQueue.main.async {
            self.error = error
        }
    }

    private func checkPermissions() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            // Pause the session queue until the user answers the prompt.
            sessionQueue.suspend()
            AVCaptureDevice.requestAccess(for: .video) { authorized in
                if !authorized {
                    self.status = .unauthorized
                    self.set(error: .deniedAuthorization)
                }
                self.sessionQueue.resume()
            }
        case .restricted:
            status = .unauthorized
            set(error: .restrictedAuthorization)
        case .denied:
            status = .unauthorized
            set(error: .deniedAuthorization)
        case .authorized:
            break
        @unknown default:
            status = .unauthorized
            set(error: .unknownAuthorization)
        }
    }

    private func configureCaptureSession() {
        guard status == .unconfigured else {
            return
        }
        session.beginConfiguration()
        defer {
            session.commitConfiguration()
        }

        // Use the front wide-angle camera as input.
        let device = AVCaptureDevice.default(
            .builtInWideAngleCamera,
            for: .video,
            position: .front)
        guard let camera = device else {
            set(error: .cameraUnavailable)
            status = .failed
            return
        }

        do {
            let cameraInput = try AVCaptureDeviceInput(device: camera)
            if session.canAddInput(cameraInput) {
                session.addInput(cameraInput)
            } else {
                set(error: .cannotAddInput)
                status = .failed
                return
            }
        } catch {
            set(error: .createCaptureInput(error))
            status = .failed
            return
        }

        // Deliver BGRA frames in portrait orientation.
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            videoOutput.videoSettings =
                [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
            let videoConnection = videoOutput.connection(with: .video)
            videoConnection?.videoOrientation = .portrait
        } else {
            set(error: .cannotAddOutput)
            status = .failed
            return
        }

        // Record success; the enum's .configured case was otherwise never set.
        status = .configured
    }
}
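FrameManager (below) is the only delegate registered in this commit, but set(_:queue:) accepts any AVCaptureVideoDataOutputSampleBufferDelegate. A minimal sketch with a hypothetical delegate that only counts frames, runnable as main.swift-style top-level code:

import AVFoundation

// Hypothetical delegate: counts delivered frames, nothing more.
final class FrameCounter: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    private(set) var frames = 0

    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        frames += 1
    }
}

let counter = FrameCounter()
CameraManager.shared.set(counter, queue: DispatchQueue(label: "frame-counter"))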

View File

@@ -0,0 +1,33 @@
import AVFoundation

/// Receives frames from CameraManager and republishes the latest one.
class FrameManager: NSObject, ObservableObject {
    static let shared = FrameManager()

    /// The most recent camera frame, published on the main queue for UI use.
    @Published var current: CVPixelBuffer?

    /// Dedicated delivery queue so the session queue stays free.
    let videoOutputQueue = DispatchQueue(
        label: "com.raywenderlich.VideoOutputQ",
        qos: .userInitiated,
        attributes: [],
        autoreleaseFrequency: .workItem)

    private override init() {
        super.init()
        // Register as the camera's sample-buffer delegate.
        CameraManager.shared.set(self, queue: videoOutputQueue)
    }
}

extension FrameManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        if let buffer = sampleBuffer.imageBuffer {
            DispatchQueue.main.async {
                self.current = buffer
            }
        }
    }
}
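Since current is @Published, consumers other than ContentViewModel can observe frames with plain Combine. A minimal playground-style sketch that logs each frame's dimensions; the logging sink is illustrative:

import Combine
import CoreVideo

var cancellables = Set<AnyCancellable>()

FrameManager.shared.$current
    .compactMap { $0 }  // drop nil frames
    .sink { buffer in
        print("frame: \(CVPixelBufferGetWidth(buffer)) x \(CVPixelBufferGetHeight(buffer))")
    }
    .store(in: &cancellables)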

View File

@@ -0,0 +1,35 @@
import SwiftUI
import MPPoseTracking

struct ContentView: View {
    @StateObject private var model = ContentViewModel()
    let poseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true))

    var body: some View {
        VStack {
            FrameView(image: model.frame)
                .edgesIgnoringSafeArea(.all)
            // buildInferenceView()
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
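The poseTracking property above is created but never attached to a camera in this SwiftUI path. A sketch of how it could be started, reusing only the MPPoseTracking API already exercised in ViewController; the host view and the onAppear placement are assumptions, not part of this commit:

import SwiftUI
import MPPoseTracking

// Hypothetical host that starts the pose-tracking graph when the view appears.
struct PoseTrackingHost: View {
    let poseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true))
    let cameraSource = MPPCameraInputSource()

    var body: some View {
        Color.clear.onAppear {
            cameraSource.requestCameraAccess(completionHandler: { granted in
                if granted {
                    poseTracking.start(withCamera: cameraSource)
                }
            })
        }
    }
}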

View File

@@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSPhotoLibraryUsageDescription</key>
<string>We need to access your library</string>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>We need to access your Camera</string>
<key>UIApplicationSupportsIndirectInputEvents</key>
<true/>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>

View File

@@ -0,0 +1,17 @@
//
//  ModelsLabTestApp.swift
//  ModelsLabTest
//
//  Created by Mautisim Munir on 02/06/2022.
//

import SwiftUI

@main
struct ModelsLabTestApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,60 @@
//
//  ContentViewModel.swift
//  ModelsLabTest
//
//  Created by Mautisim Munir on 12/06/2022.
//

import Foundation
import CoreImage
import UIKit
import SwiftUI
import MPPoseTracking

/// Converts camera pixel buffers into CGImages for FrameView.
class ContentViewModel: ObservableObject {
    /// The latest frame, ready for display.
    @Published var frame: CGImage?

    private let frameManager = FrameManager.shared

    /// Reused across frames; creating a CIContext per frame is expensive.
    private let ciContext = CIContext()

    var counter = 0

    // let modelPath = Bundle.main.path(forResource: "model", ofType: "edgem")!
    // let model:EdgeModel

    init() {
        // model = EdgeModel(modelPath: modelPath)
        setupSubscriptions()
    }

    func setupSubscriptions() {
        // Convert each published pixel buffer to a CGImage on the main run loop;
        // nil buffers simply pass through as nil frames.
        frameManager.$current
            .receive(on: RunLoop.main)
            .map { [ciContext] buffer -> CGImage? in
                guard let buffer = buffer else { return nil }
                let ciImage = CIImage(cvImageBuffer: buffer)
                return ciContext.createCGImage(ciImage, from: ciImage.extent)
            }
            .assign(to: &$frame)
    }
}

View File

@@ -0,0 +1,41 @@
//
//  FrameView.swift
//  ModelsLabTest
//
//  Created by Mautisim Munir on 12/06/2022.
//

import SwiftUI

struct FrameView: View {
    var image: CGImage?
    private let label = Text("Camera feed")

    var body: some View {
        if let image = image {
            GeometryReader { geometry in
                // Mirror the image so the front camera reads like a mirror,
                // and fill the available space, clipping any overflow.
                Image(image, scale: 1.0, orientation: .upMirrored, label: label)
                    .resizable()
                    // .scaledToFit()
                    .scaledToFill()
                    .frame(
                        width: geometry.size.width,
                        height: geometry.size.height,
                        alignment: .center)
                    .clipped()
            }
        } else {
            // No frame yet: show a black placeholder.
            Color.black
        }
    }
}

struct FrameView_Previews: PreviewProvider {
    static var previews: some View {
        FrameView()
    }
}

View File

@@ -8,6 +8,7 @@ objc_library(
     hdrs = [
         "PoseTracking.h",
         "PoseTrackingOptions.h",
+        "PoseTrackingResults.h",
     ],
     copts = [
         "-Wno-shorten-64-to-32",