diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard
index 4248c5154..9ecdfe197 100644
--- a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/Base.lproj/Main.storyboard
@@ -24,7 +24,7 @@
-
+
@@ -49,12 +49,20 @@
+
+
diff --git a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift
index 35346d71d..6c7a37858 100644
--- a/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift
+++ b/mediapipe/examples/ios/posetracking-lindera/PoseTrackingLindera/ViewController.swift
@@ -14,6 +14,7 @@ class ViewController: UIViewController {
@IBOutlet var showLandmarksButton: UIButton!
@IBOutlet var chooseModelButton: UIButton!
@IBOutlet var titleview: UIView!
+ @IBOutlet var fpsLabel: UILabel!
func updateLandmarksButtonText(){
if (lindera.areLandmarksShown()){
@@ -94,9 +95,10 @@ class ViewController: UIViewController {
-/// A simple LinderaDelegate implementation that prints nose coordinates if detected
+/// A simple LinderaDelegate implementation; the nose-coordinate logging below is
+/// kept for reference but disabled because per-frame printing causes large drops in FPS
class LinderaDelegateImpl:LinderaDelegate{
func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
- if let kpt = event.pose.nose{
- print("LinderaDelegateImpl: Nose Keypoint (\(String(describing: kpt.position.x)),\(String(describing: kpt.position.y)),\(kpt.position.z)) with confidence \(kpt.confidence)")
- }
+        // Printing the nose keypoint on every frame causes large drops in FPS,
+        // so the log below is disabled:
+        // if let kpt = event.pose.nose {
+        //     print("LinderaDelegateImpl: Nose Keypoint (\(kpt.position.x), \(kpt.position.y), \(kpt.position.z)) with confidence \(kpt.confidence)")
+        // }
}
@@ -131,8 +133,15 @@ class ViewController: UIViewController {
lindera.startCamera()
-
+        self.lindera.setFpsDelegate(fpsDelegate: { [weak self] fps in
+            DispatchQueue.main.async {
+                self?.fpsLabel.text = "\(Int(fps)) fps"
+            }
+        })
self.liveView.bringSubviewToFront(titleview)
+        self.liveView.bringSubviewToFront(fpsLabel)
updateLandmarksButtonText()
updateModelButtonText()
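
Taken together, these hunks wire the FPS readout end to end: the graph delegate ticks the FPS helper once per output frame, Lindera forwards the smoothed value through setFpsDelegate, and the view controller hops to the main queue before touching UIKit. A minimal sketch of the same wiring in isolation (FpsDemoViewController and the programmatic label are hypothetical stand-ins for the storyboard pieces, and the Lindera module is assumed to be imported):

    import UIKit

    final class FpsDemoViewController: UIViewController {
        private let lindera = Lindera()
        private let fpsLabel = UILabel()

        override func viewDidLoad() {
            super.viewDidLoad()
            view.addSubview(fpsLabel)
            lindera.setFpsDelegate { [weak self] fps in
                // The callback can arrive off the main thread; UIKit work must hop to main.
                DispatchQueue.main.async {
                    self?.fpsLabel.text = "\(Int(fps)) fps"
                }
            }
            lindera.startCamera()
        }
    }
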
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h
index a927273d8..65f1b392c 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.h
@@ -42,6 +42,9 @@
// Codeblock that runs whenever pose tracking results are available
@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
+// Block that runs whenever a frame is emitted on the graph output stream
+// (used by the Swift layer to measure FPS)
+@property(nonatomic) void(^graphOutputStreamListener)(void);
+
- (instancetype) initWithPoseTrackingOptions: (PoseTrackingOptions*) poseTrackingOptions;
- (void)startGraph;
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
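
The new hook mirrors the existing poseTrackingResultsListener and is assigned the same way from Swift; a minimal standalone sketch (the real wiring lives in Lindera.swift further down):

    let options = PoseTrackingOptions(showLandmarks: true, modelComplexity: 1)
    let poseTracking = PoseTracking(poseTrackingOptions: options)
    var frameCount = 0
    poseTracking.graphOutputStreamListener = {
        // Keep per-frame work cheap here; printing every frame hurts FPS.
        frameCount += 1
    }
    poseTracking.startGraph()
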
diff --git a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm
index 6414f22e3..4ea760cd6 100644
--- a/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm
+++ b/mediapipe/objc/solutions/posetracking_gpu/PoseTracking.mm
@@ -16,6 +16,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
@property (nonatomic) const char* graphOutputStream;
@property (nonatomic) MPPLayerRenderer* renderer;
@property(nonatomic) void(^poseTrackingResultsListener)(PoseTrackingResults*);
+@property(nonatomic) void(^graphOutputStreamListener)(void);
+
-(id) initWithMediapipeGraph: (MPPGraph*) graph graphOutputStream: (const char*) graphOutputStream
renderer: (MPPLayerRenderer*) renderer;
@@ -47,6 +49,8 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
fromStream:(const std::string&)streamName {
if (streamName == self.graphOutputStream) {
+    if (self.graphOutputStreamListener) {
+      self.graphOutputStreamListener();
+    }
+
// Display the captured image on the screen.
CVPixelBufferRetain(pixelBuffer);
dispatch_async(dispatch_get_main_queue(), ^{
@@ -163,6 +167,11 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
weakSelf.poseTrackingResultsListener(results);
};
+    self->poseTrackingGraphDelegate.graphOutputStreamListener = ^{
+      if (weakSelf.graphOutputStreamListener != nil) {
+        weakSelf.graphOutputStreamListener();
+      }
+    };
+
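
Design note on the two guards above: unlike sending a message to nil, invoking a nil Objective-C block crashes, so both the delegate callback and the forwarding block check for a listener before calling it. Swift callers get the same guard implicitly through optional closures (`listener?()`).
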
diff --git a/mediapipe/swift/solutions/lindera/BUILD b/mediapipe/swift/solutions/lindera/BUILD
index 6d97f1502..42c4f0851 100644
--- a/mediapipe/swift/solutions/lindera/BUILD
+++ b/mediapipe/swift/solutions/lindera/BUILD
@@ -2,7 +2,7 @@ load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "lindera",
- srcs = ["Lindera.swift","Asensei3D.swift"],
+    srcs = ["Lindera.swift", "Asensei3D.swift", "utils.swift"],
linkopts = [
"-lc++",
"-std=c++17",
diff --git a/mediapipe/swift/solutions/lindera/Lindera.swift b/mediapipe/swift/solutions/lindera/Lindera.swift
index 12d961f9d..eeffc1602 100644
--- a/mediapipe/swift/solutions/lindera/Lindera.swift
+++ b/mediapipe/swift/solutions/lindera/Lindera.swift
@@ -8,6 +8,11 @@ import UIKit
public final class Lindera{
// initalize the PoseTracking api and load models
var poseTracking:PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true,modelComplexity: 1))
+    // Smoothed-FPS tracker; each graph output frame ticks it via logTime().
+    let fpsHelper = FPSHelper(smoothingFactor: 0.95)
+
+    /// Registers a callback that receives the smoothed FPS estimate.
+    public func setFpsDelegate(fpsDelegate: @escaping (_ fps: Double) -> Void) {
+        fpsHelper.onFpsUpdate = fpsDelegate
+    }
+
// attach Mediapipe camera helper to our class
let cameraSource = MPPCameraInputSource()
@@ -44,10 +49,7 @@ public final class Lindera{
}
-// public func getModelComplexity() -> Int{
-// return self.poseTracking
-// }
- // Initializes pipeline parameters and starts mediapipe graph
private lazy var linderaExerciseSession: UIView = {
// this will be the main camera view
@@ -70,17 +72,26 @@ public final class Lindera{
}
// call LinderaDelegate on pose tracking results
self.poseTracking.poseTrackingResultsListener = {[weak self] results in
guard let self = self, let results = results else {
return
}
self.delegate?.lindera(self, didDetect: .init(pose: Asensei3DPose.init(results), timestamp: CMTimeGetSeconds(self.poseTracking.timeStamp)))
}
+        self.poseTracking.graphOutputStreamListener = { [weak self] in
+            // One tick per frame emitted by the graph; drives the FPS estimate.
+            self?.fpsHelper.logTime()
+        }
self.poseTracking.startGraph()
// attach camera's output with poseTracking object and its videoQueue
self.cameraSource.setDelegate(self.poseTracking, queue: self.poseTracking.videoQueue)
}
     public required init(){}
public func startCamera(_ completion: ((Result) -> Void)? = nil) {
diff --git a/mediapipe/swift/solutions/lindera/utils.swift b/mediapipe/swift/solutions/lindera/utils.swift
new file mode 100644
index 000000000..846d34214
--- /dev/null
+++ b/mediapipe/swift/solutions/lindera/utils.swift
@@ -0,0 +1,45 @@
+//
+// utils.swift
+// Mediapipe
+//
+// Created by Mautisim Munir on 21/10/2022.
+//
+
+import Foundation
+
+/// Computes an exponentially smoothed frames-per-second estimate from
+/// successive calls to `logTime()`.
+public class FPSHelper {
+    var smoothingFactor = 0.8
+    var _fps: Double? = nil
+    var time: CFAbsoluteTime? = nil
+    public var onFpsUpdate: ((_ fps: Double) -> Void)? = nil
+
+    init(smoothingFactor: Double) {
+        self.smoothingFactor = smoothingFactor
+    }
+
+    /// Call once per frame. Derives the instantaneous FPS from the time since
+    /// the previous call and folds it into an exponential moving average.
+    public func logTime() {
+        let currTime = CFAbsoluteTimeGetCurrent()
+        if let lastTime = time {
+            let elapsedTime = currTime - lastTime
+            let fps = 1 / elapsedTime
+            if let smoothed = _fps {
+                // Higher smoothingFactor weights history more, giving a steadier readout.
+                _fps = (1 - smoothingFactor) * fps + smoothingFactor * smoothed
+            } else {
+                _fps = fps
+            }
+            onFpsUpdate?(_fps!)
+        }
+        time = currTime
+    }
+}
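
The smoothing in logTime() is an exponential moving average: fps_new = (1 - a) * fps_instantaneous + a * fps_previous with a = smoothingFactor, so each frame contributes a (1 - a) share and the effective averaging window is roughly 1/(1 - a) frames (about 20 at the a = 0.95 Lindera uses). A quick worked check of that update:

    let alpha = 0.95
    var smoothed = 60.0    // steady 60 fps so far
    let hiccup = 20.0      // one unusually slow frame
    smoothed = (1 - alpha) * hiccup + alpha * smoothed
    print(smoothed)        // 58.0, so a single stutter barely moves the readout
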