added support for changing model complexity

Mautisim Munir 2022-10-21 12:29:47 +05:00
parent 003df4fe47
commit 80815435ce
10 changed files with 126 additions and 39 deletions

View File

@ -13,10 +13,6 @@ swift_library(
name = "lindera_app_lib",
srcs = glob(["**/*.swift"]),
data = [
"//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
"//mediapipe/modules/pose_detection:pose_detection.tflite",
"//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
] + [
"Base.lproj/LaunchScreen.storyboard",
"Base.lproj/Main.storyboard",
],

View File

@ -34,6 +34,9 @@
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<state key="normal" title="Button"/>
<buttonConfiguration key="configuration" style="plain" title="MODEL (LITE)"/>
<connections>
<action selector="setModelComplexity" destination="BYZ-38-t0r" eventType="touchDown" id="cVM-E4-dua"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" text=" Copper Labs" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontForContentSizeCategory="YES" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="UGR-Ho-hUQ">
<rect key="frame" x="8" y="41" width="398" height="37"/>

View File

@ -24,8 +24,60 @@ class ViewController: UIViewController {
}
func updateModelButtonText(){
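// Reflect the active model complexity in the button title.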
var text = "MODEL "
switch lindera.getModelComplexity() {
case 0:
text += "(LITE)"
case 1:
text += "(FULL)"
case 2:
text += "(HEAVY)"
default:
text += "(Unknown)"
}
chooseModelButton.setTitle(text, for: UIControl.State.normal)
}
@IBAction func setModelComplexity(){
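// Offer the three model variants in an action sheet.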
let alert = UIAlertController(
title: nil,
message: nil,
preferredStyle: .actionSheet
)
alert.addAction(
.init(title: "MODEL (LITE)", style: .default) {[weak self] _ in
self?.lindera.setModelComplexityNow(complexity: 0)
self?.updateModelButtonText()
}
)
alert.addAction(
.init(title: "MODEL (FULL)", style: .default) { [weak self] _ in
self?.lindera.setModelComplexityNow(complexity: 1)
self?.updateModelButtonText()
}
)
alert.addAction(
.init(title: "MODEL (HEAVY)", style: .default) { [weak self] _ in
self?.lindera.setModelComplexityNow(complexity: 2)
self?.updateModelButtonText()
}
)
present(alert, animated: true)
}
@IBAction func showLandmarksButtonTouch(sender: UIButton){
@ -33,25 +85,7 @@ class ViewController: UIViewController {
lindera.showLandmarks(value: !lindera.areLandmarksShown())
updateLandmarksButtonText()
// let alert = UIAlertController(
// title: nil,
// message: nil,
// preferredStyle: .actionSheet
// )
//
// alert.addAction(
// .init(title: "Action 1", style: .default) { _ in
// print("Action1")
// }
// )
//
// alert.addAction(
// .init(title: "Action 2", style: .default) { _ in
// print("Action 2")
// }
// )
//
// present(alert, animated: true)
}
@ -100,7 +134,7 @@ class ViewController: UIViewController {
self.liveView.bringSubviewToFront(titleview)
updateLandmarksButtonText()
// self.liveView.bringSubviewToFront(chooseModelButton)
updateModelButtonText()
}

View File

@ -44,6 +44,8 @@ node {
node {
calculator: "PoseLandmarkGpu"
input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
input_side_packet: "MODEL_COMPLEXITY:model_complexity"
input_stream: "IMAGE:throttled_input_video"
output_stream: "LANDMARKS:pose_landmarks"
output_stream: "SEGMENTATION_MASK:segmentation_mask"

View File

@ -13,6 +13,12 @@ objc_library(
copts = [
"-Wno-shorten-64-to-32",
],
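# All three pose landmark models are bundled so complexity can be switched at runtime without rebuilding.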
data = [
"//mediapipe/graphs/pose_tracking:pose_tracking_gpu.binarypb",
"//mediapipe/modules/pose_detection:pose_detection.tflite",
"//mediapipe/modules/pose_landmark:pose_landmark_heavy.tflite",
"//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
"//mediapipe/modules/pose_landmark:pose_landmark_lite.tflite", ] ,
sdk_frameworks = ["Accelerate"],
visibility = ["//visibility:public"],
deps = [

View File

@ -47,6 +47,7 @@
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource;
- (void)showLandmarks: (BOOL) value;
- (BOOL) areLandmarksShown;
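// Cancels the running graph, closes its input streams, and waits for it to finish.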
- (void) stopGraph;
@end

View File

@ -2,7 +2,7 @@
#include "mediapipe/framework/formats/landmark.pb.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPTimestampConverter.h"
#include "mediapipe/framework/packet.h"
static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
static const char* kLandmarksOutputStream = "pose_landmarks";
@ -162,6 +162,10 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
weakSelf.poseTrackingResultsListener(results);
};
self->mediapipeGraph.delegate = self->poseTrackingGraphDelegate;
@ -184,6 +188,7 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
- (void)startGraph {
// Start running self.mediapipeGraph.
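// Side packets must be supplied before the graph starts; model_complexity selects the lite/full/heavy landmark model.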
[self->mediapipeGraph setSidePacket:mediapipe::MakePacket<int>(self.poseTrackingOptions.modelComplexity) named:"model_complexity"];
NSError* error;
if (![self->mediapipeGraph startWithError:&error]) {
NSLog(@"Failed to start graph: %@", error);
@ -193,6 +198,20 @@ static const char* kLandmarksOutputStream = "pose_landmarks";
}
}
- (void) stopGraph {
[self->mediapipeGraph cancel];
NSError* error;
if ([self->mediapipeGraph closeAllInputStreamsWithError: &error]){
if (![self->mediapipeGraph waitUntilDoneWithError:&error]){
NSLog(@"Failed to stop graph: %@", error);
}
}else {
NSLog(@"Failed to close input streams: %@", error);
}
}
- (void) startWithCamera: (MPPCameraInputSource*) cameraSource {
[cameraSource setDelegate:self queue:self.videoQueue];

View File

@ -5,12 +5,16 @@
#ifndef MEDIAPIPE_POSETRACKINGOPTIONS_H
#define MEDIAPIPE_POSETRACKINGOPTIONS_H
#import <Foundation/Foundation.h>
@interface PoseTrackingOptions: NSObject
@property(nonatomic) int modelComplexity;
@property(nonatomic) bool showLandmarks;
//@property(nonatomic) int cameraRotation;
- (instancetype) initWithShowLandmarks : (bool) showLandmarks;
- (instancetype) initWithShowLandmarks : (bool) showLandmarks modelComplexity: (int) modelComplexity;
@end

View File

@ -2,9 +2,10 @@
@implementation PoseTrackingOptions
- (instancetype) initWithShowLandmarks : (bool) showLandmarks {
- (instancetype) initWithShowLandmarks : (bool) showLandmarks modelComplexity: (int) modelComplexity{
// self.cameraRotation = cameraRotation;
self.showLandmarks = showLandmarks;
self.modelComplexity = modelComplexity;
return self;
}

View File

@ -7,7 +7,7 @@ import UIKit
/// TFLite models are also loaded when you initialize this class
public final class Lindera{
// initalize the PoseTracking api and load models
let poseTracking:PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true))
var poseTracking:PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true, modelComplexity: 1))
// attach Mediapipe camera helper to our class
let cameraSource = MPPCameraInputSource()
@ -26,6 +26,24 @@ public final class Lindera{
public func areLandmarksShown() -> Bool{
return self.poseTracking.areLandmarksShown()
}
public func getModelComplexity() -> Int {
return Int(self.poseTracking.poseTrackingOptions.modelComplexity)
}
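// The complexity value travels to the graph as a start-time side packet, so changing it requires rebuilding the graph.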
public func setModelComplexityNow(complexity:Int){
let poseTrackingOptions = poseTracking.poseTrackingOptions
poseTrackingOptions?.modelComplexity = Int32(complexity)
poseTracking = PoseTracking(poseTrackingOptions: poseTrackingOptions)
startPoseTracking()
startCamera()
}
// public func getModelComplexity() -> Int{
// return self.poseTracking
// }
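A minimal call-site sketch for the new API (assuming the public Lindera surface shown in this diff; a real app would also add the camera view to its view hierarchy first):

// Hypothetical call site: switch to the heavy model at runtime.
let lindera = Lindera()
lindera.startCamera()
// 0 = lite, 1 = full, 2 = heavy; this rebuilds the graph and restarts the camera.
lindera.setModelComplexityNow(complexity: 2)
assert(lindera.getModelComplexity() == 2)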
@ -35,6 +53,14 @@ public final class Lindera{
// this will be the main camera view
let liveView = UIView()
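// Booting pose tracking here ties graph setup to the first access of this lazy view.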
startPoseTracking()
return liveView
}()
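// Configures the camera and wires its output into the pose tracking graph.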
private func startPoseTracking(){
// set camera preferences
self.cameraSource.sessionPreset = AVCaptureSession.Preset.high.rawValue
self.cameraSource.cameraPosition = AVCaptureDevice.Position.front
@ -53,16 +79,11 @@ public final class Lindera{
self.poseTracking.startGraph()
// attach camera's output with poseTracking object and its videoQueue
self.cameraSource.setDelegate(self.poseTracking, queue: self.poseTracking.videoQueue)
return liveView
}()
}
public required init(){}
public func startCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
if (!self.cameraSource.isRunning){
// set our rendering layer frame according to cameraView boundry
self.poseTracking.renderer.layer.frame = cameraView.layer.bounds
// attach render CALayer on cameraView to render output to
@ -84,7 +105,6 @@ public final class Lindera{
}
})
}
@ -98,6 +118,7 @@ public final class Lindera{
}
}
/// switches camera from front to back and vice versa
func switchCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
self.poseTracking.videoQueue.async { [weak self] in