working cocoa pod with framework

commit d29e3c01a9 (parent 2c01e84955)
@@ -528,11 +528,11 @@ load("@build_bazel_rules_android//android:rules.bzl", "android_ndk_repository",
 android_sdk_repository(
     name = "androidsdk",
     build_tools_version = "30.0.3",
-    path = "/Users/tj/Library/Android/sdk", # Path to Android SDK, optional if $ANDROID_HOME is set
+    # path = "/Users/tj/Library/Android/sdk", # Path to Android SDK, optional if $ANDROID_HOME is set
 )

 android_ndk_repository(
     name = "androidndk", # Required. Name *must* be "androidndk".
     api_level = 21,
-    path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529", # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
+    # path = "/Users/tj/Library/Android/sdk/ndk/21.4.7075529", # Optional. Can be omitted if `ANDROID_NDK_HOME` environment variable is set.
 )
@@ -1,10 +1,12 @@
 # Uncomment the next line to define a global platform for your project
 # platform :ios, '9.0'
+source 'https://github.com/copper-labs/CocoaSpecs.git'

 target 'PoseTrackingLindera' do
   # Comment the next line if you don't want to use dynamic frameworks
   use_frameworks!
-  pod 'LinderaDetection', :path => 'LinderaDetection'
+
+  pod 'LinderaDetection' #, :path => 'LinderaDetection'

   # Pods for PoseTrackingLindera
Binary file not shown.
@@ -6,6 +6,10 @@ load(
     "ios_static_framework",
     "ios_unit_test",
 )
+load(
+    "@build_bazel_rules_apple//apple:apple.bzl",
+    "apple_static_xcframework",
+)
 load(
     "//mediapipe/examples/ios:bundle_id.bzl",
     "BUNDLE_ID_PREFIX",

@@ -67,10 +71,22 @@ ios_static_framework(
     visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
-        "//mediapipe/calculators/core:flow_limiter_calculator",

         # "//third_party:opencv",
-        "@ios_opencv//:OpencvFramework",
+        # "@ios_opencv//:OpencvFramework",
+    ],
+)
+
+apple_static_xcframework(
+    name = "MPPoseTrackingXC",
+    bundle_name = "MPPoseTracking",
+    minimum_os_versions = {"ios": "12.0"},
+    public_hdrs = MPP_HEADERS + MP_GEN_IOS_HEADERS,
+    deps = [
+        "//mediapipe/objc/solutions/posetracking_gpu:posetracking_gpu_solution",
+
+        # "//third_party:opencv",
+        # "@ios_opencv//:OpencvFramework",
     ],
 )
@@ -133,10 +149,15 @@ genrule(

 objc_library(
     name = "posetracking_gpu_solution",
-    srcs = glob([
-        "*.h",
-        "*.mm",
-    ]),
+    srcs = [
+        "PoseTrackingOptions.mm",
+        "PoseTrackingResults.mm",
+        "PoseTracking.mm",
+    ] + select({
+        "//mediapipe:ios_i386": [],
+        "//mediapipe:ios_x86_64": [],
+        "//conditions:default": [],
+    }),
     hdrs = MPP_HEADERS + MP_GEN_IOS_HEADERS,
     copts = [
         "-Wno-shorten-64-to-32",
@@ -168,13 +189,13 @@ objc_library(
         "//mediapipe/objc:mediapipe_framework_ios",
         "//mediapipe/objc:mediapipe_input_sources_ios",
         "//mediapipe/objc:mediapipe_layer_renderer",
+        "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
+        "//mediapipe/framework/formats:landmark_cc_proto",
+        "calculator_registry",
     ] + select({
         "//mediapipe:ios_i386": [],
         "//mediapipe:ios_x86_64": [],
         "//conditions:default": [
-            "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
-            "//mediapipe/framework/formats:landmark_cc_proto",
-            "calculator_registry",
         ],
     }),
     alwayslink = True,
@@ -20,10 +20,13 @@ genrule(
     srcs = [
         "//mediapipe/objc/solutions/posetracking_gpu:MPPoseTrackingHeaderPatched",
         "LinderaDetection.podspec",
+        "@ios_opencv//:OpencvFrameworkContents",
     ] + glob(["*.swift"]),
     outs = ["LinderaDetection.zip"],
     cmd = """

    mkdir mediapipe/swift/solutions/lindera/frameworks
+   cp -r external/ios_opencv/opencv2.framework mediapipe/swift/solutions/lindera/frameworks
    unzip $(location //mediapipe/objc/solutions/posetracking_gpu:MPPoseTrackingHeaderPatched) -d mediapipe/swift/solutions/lindera/frameworks
    cd mediapipe/swift/solutions/lindera/

@@ -1,31 +1,32 @@
 // This is the copperlabs posetracking api built in objective c
-import MPPoseTracking
 import UIKit
+#if arch(arm64)
+import MPPoseTracking


 /// A helper class to run the Pose Tracking API
 /// TFLite models are also loaded when you initialize this class
 public final class Lindera{




     //MARK: - Public Class API


     // A delegate to handle results
     public weak var delegate: LinderaDelegate?

     /// This function sets up your callback function to happen whenver there is an fps update
     public func setFpsDelegate(fpsDelegate: @escaping (_ fps:Double)->Void){
         fpsHelper.onFpsUpdate = fpsDelegate;
     }

     // Get the camera UI View that may contain landmarks drawing
     public var cameraView: UIView {
         return self.linderaExerciseSession
     }


     // Show Landmarks - works instantaneously!
     public func showLandmarks(value:Bool){
         self.poseTracking.showLandmarks(value)
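The hunk above is the public surface of the Swift wrapper: a weak `delegate`, `setFpsDelegate`, `cameraView`, and `showLandmarks`. A minimal client-side sketch of that surface, assuming a UIKit host app that links the pod built by this commit; `PoseViewController` and its layout code are illustrative and not part of the commit:

```swift
import UIKit
// import MPPoseTracking would also be needed on arm64, matching the #if above.

// Hypothetical host view controller; only the Lindera calls are taken from
// the API shown in this diff.
final class PoseViewController: UIViewController {
    let lindera = Lindera()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Embed the camera view exposed by the wrapper.
        lindera.cameraView.frame = view.bounds
        view.addSubview(lindera.cameraView)

        // Draw landmarks and log the FPS reported by the graph.
        lindera.showLandmarks(value: true)
        lindera.setFpsDelegate { fps in
            print("pose graph fps: \(fps)")
        }
    }
}
```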
@@ -38,50 +39,50 @@ public final class Lindera{
     public func getModelComplexity() -> Int {
         return Int(self.poseTracking.poseTrackingOptions.modelComplexity);
     }

     // Set the model complexity and restart detection to load new models
     public func setModelComplexityNow(complexity:Int){
         let poseTrackingOptions = poseTracking.poseTrackingOptions

         poseTrackingOptions?.modelComplexity = Int32(complexity)

         poseTracking = PoseTracking(poseTrackingOptions: poseTrackingOptions)
         startPoseTracking()
         startCamera()

     }

     public required init(){

         startPoseTracking()
     }


     public func startCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
         // set our rendering layer frame according to cameraView boundry
         self.poseTracking.renderer.layer.frame = cameraView.layer.bounds
         // attach render CALayer on cameraView to render output to
         self.cameraView.layer.addSublayer(self.poseTracking.renderer.layer)

         self.cameraSource.requestCameraAccess(
             completionHandler: {(granted:Bool)->Void in
                 if (granted){
                     self.poseTracking.videoQueue.async(execute:{ [weak self] in

                         self?.cameraSource.start()

                     } )
                     completion?(.success(Void()))
                 }else{

                     completion?(.failure(preconditionFailure("Camera Access Not Granted")))

                 }
             })




     }
     /// Choose front or back camera. Must restart camera after use if already started
     public func selectCamera(_ position: AVCaptureDevice.Position, _ completion: ((Result<Void, Error>) -> Void)? = nil) {
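`startCamera`, `selectCamera`, and `setModelComplexityNow` in the context above take an optional completion or a plain value. A short illustrative sketch of driving them from a client, assuming a `lindera` instance like the one in the earlier sketch (the error handling and the chosen complexity value are examples, not part of the commit):

```swift
import AVFoundation

// Illustrative session control; only method signatures visible in the diff are used.
func beginTracking(with lindera: Lindera) {
    lindera.startCamera { result in
        switch result {
        case .success:
            print("camera running")
        case .failure(let error):
            print("camera failed: \(error)")
        }
    }

    // Pick the front camera; per the doc comment above, the camera must be
    // restarted afterwards if it was already running.
    lindera.selectCamera(.front) { _ in }

    // Reload with a different model complexity (MediaPipe pose uses 0/1/2);
    // as shown above, this restarts pose tracking and the camera.
    lindera.setModelComplexityNow(complexity: 2)
}
```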
@@ -89,23 +90,23 @@ public final class Lindera{
             self?.cameraSource.cameraPosition = position
             completion?(.success(Void()))
         }

     }


     // MARK: - Private Class Functions

     // Set your custom view heree
     private lazy var linderaExerciseSession: UIView = {

         // this will be the main camera view; Change it to custom view class to get desired results
         let liveView = UIView()

         return liveView

     }()


     private func startPoseTracking(){
         // set camera preferences
         self.cameraSource.sessionPreset = AVCaptureSession.Preset.high.rawValue
@@ -116,43 +117,43 @@ public final class Lindera{
         }
         // call LinderaDelegate on pose tracking results
         self.poseTracking.poseTrackingResultsListener = {[weak self] results in


             guard let self = self, let results = results else {
                 return
             }

             self.delegate?.lindera(self, didDetect: .init(pose: Asensei3DPose.init(results), timestamp: CMTimeGetSeconds(self.poseTracking.timeStamp)))
         }
         self.poseTracking.graphOutputStreamListener = {[weak self] in
             self?.fpsHelper.logTime()
         }

         self.poseTracking.startGraph()
         // attach camera's output with poseTracking object and its videoQueue
         self.cameraSource.setDelegate(self.poseTracking, queue: self.poseTracking.videoQueue)
     }


     func stopCamera(){
         if (self.cameraSource.isRunning){
             self.poseTracking.videoQueue.async { [weak self] in
                 self?.cameraSource.stop()
             }

         }
     }

     /// switches camera from front to back and vice versa
     func switchCamera(_ completion: ((Result<Void, Error>) -> Void)? = nil) {
         self.poseTracking.videoQueue.async { [weak self] in
             if let self = self {

                 self.stopCamera()
                 self.startCamera(completion)

                 switch(self.cameraSource.cameraPosition){

                 case .unspecified:
                     completion?(.failure(preconditionFailure("Unkown Camera Position")))
                 case .back:
@@ -161,34 +162,34 @@ public final class Lindera{
                     self.selectCamera(AVCaptureDevice.Position.back,completion)
                 @unknown default:
                     completion?(.failure(preconditionFailure("Unkown Camera Position")))

                 }


             }

         }
     }




     // MARK: - Private Class Objects
     // initalize the PoseTracking api and load models
     var poseTracking:PoseTracking = PoseTracking(poseTrackingOptions: PoseTrackingOptions(showLandmarks: true,modelComplexity: 1))

     // Needed to get fps of model
     let fpsHelper = FPSHelper(smoothingFactor: 0.95)

     // attach Mediapipe camera helper to our class
     let cameraSource = MPPCameraInputSource()



 }


 public protocol LinderaDelegate: AnyObject {

     func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event)
 }

@@ -199,61 +200,61 @@ func landmarkToBodyJointDetails(landmark: PoseLandmark) -> Asensei3DPose.BodyJoi
 }
 // MARK: - Helpers
 extension Asensei3DPose {

     init(_ pose: PoseTrackingResults) {

         self.nose = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_NOSE])

         self.leftEyeInner = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE_INNER])
         self.leftEye = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE])
         self.leftEyeOuter = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EYE_OUTER])

         self.rightEyeInner = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE_OUTER])
         self.rightEye = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE])
         self.rightEyeOuter = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EYE_OUTER])

         self.leftEar = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_EAR])
         self.rightEar = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_EAR])

         self.mouthLeft = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_MOUTH_LEFT])
         self.mouthRight = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_MOUTH_RIGHT])

         self.leftShoulder = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_SHOULDER])
         self.rightShoulder = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_SHOULDER])

         self.leftElbow = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_ELBOW])
         self.rightElbow = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_ELBOW])

         self.leftWrist = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_WRIST])
         self.rightWrist = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_WRIST])

         self.leftPinky = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_PINKY])
         self.rightPinky = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_PINKY])

         self.leftIndex = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_INDEX])
         self.rightIndex = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_INDEX])

         self.leftThumb = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_THUMB])
         self.rightThumb = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_THUMB])

         self.leftHip = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_HIP])
         self.rightHip = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_HIP])

         self.leftKnee = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_KNEE])
         self.rightKnee = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_KNEE])

         self.rightAnkle = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_ANKLE])
         self.leftAnkle = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_ANKLE])


         self.rightHeel = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_HEEL])
         self.leftHeel = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_HEEL])

         self.rightFoot = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_RIGHT_FOOT])
         self.leftFoot = landmarkToBodyJointDetails(landmark: pose.landmarks[POSE_LEFT_FOOT])



     }
 }

@@ -265,3 +266,6 @@ extension Asensei3DPose {
 //        self.z = vector.y
 //    }
 //}
+#else
+final public class Lindera{}
+#endif
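For completeness, a hedged sketch of a `LinderaDelegate` conformer. The `Event` member names (`pose`, `timestamp`) mirror the `.init(pose:timestamp:)` call visible in the diff, and `nose` mirrors the `self.nose = ...` assignment; the actual stored-property declarations and their access levels are not shown in this commit, so treat this as an assumption:

```swift
// Hypothetical delegate; field access is inferred from the initializer call
// and the joint assignments shown in the diff, not from a published API.
final class PoseLogger: LinderaDelegate {
    func lindera(_ lindera: Lindera, didDetect event: Asensei3DPose.Event) {
        print("pose detected at t=\(event.timestamp), nose joint: \(event.pose.nose)")
    }
}

// Wiring it up with the instance from the earlier sketch:
// lindera.delegate = poseLogger   // keep a strong reference elsewhere; delegate is weak
```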
@@ -16,15 +16,15 @@ Pod::Spec.new do |spec|
   #

   spec.name = "LinderaDetection"
-  spec.version = "0.0.1"
-  spec.summary = "LinderaDetection is a simple yet powerful interface to run AI Health Solutions"
+  spec.version = "0.0.2"
+  spec.summary = "LinderaDetection is a simple yet powerful interface to run AI Fitness Solutions"

   # This description is used to generate tags and improve search results.
   #   * Think: What does it do? Why did you write it? What is the focus?
   #   * Try to keep it short, snappy and to the point.
   #   * Write the description between the DESC delimiters below.
   #   * Finally, don't worry about the indent, CocoaPods strips it!
-  spec.description = "LinderaDetection is a simple yet powerful interface to run AI Health Solutions"
+  spec.description = "LinderaDetection is a simple yet powerful interface to run AI Fitness Solutions. It is powered by Mediapipe."

   spec.homepage = "https://github.com/udamaster/mediapipe"
   # spec.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
@@ -37,8 +37,11 @@ Pod::Spec.new do |spec|
   # Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
   #

-  spec.license = "MIT (example)"
-  spec.license = { :type => "MIT"}
+  spec.license = { :type => 'MIT', :text => <<-LICENSE
+    Copyright 2012
+    Permission is granted to...
+  LICENSE
+  }


   # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
@@ -59,7 +62,7 @@ Pod::Spec.new do |spec|
   # If this Pod runs only on iOS or OS X, then specify the platform and
   # the deployment target. You can optionally include the target after the platform.
   #
+  spec.swift_versions = ["4.0"]
   # spec.platform = :ios
   spec.platform = :ios, "12.0"

@@ -74,9 +77,12 @@ Pod::Spec.new do |spec|
   #
   # Specify the location from where the source should be retrieved.
   # Supports git, hg, bzr, svn and HTTP.
-  #
+  spec.source = { :http => 'https://github.com/copper-labs/iOSFramework/releases/download/0.1.0/LinderaDetection.zip' }

-  spec.source = { :http => 'https://edge-engine-store.s3.amazonaws.com/libs/ios/EdgeEngine/pod/EdgeEngine.zip' }
+  # for quickly testing locally
+  # spec.source = { :http => 'http://127.0.0.1:8000/LinderaDetection.zip' }


   # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
   #
@@ -128,14 +134,21 @@ Pod::Spec.new do |spec|
   # spec.requires_arc = true

   # spec.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
-  spec.dependency "OpenCV", "3.2"
+  # spec.dependency "OpenCV", "3.2"
   spec.static_framework = true
+  # spec.preserve_paths = "frameworks/**/*"
   spec.ios.vendored_frameworks = 'frameworks/*.framework'
-  # spec.pod_target_xcconfig = { 'OTHER_LDFLAGS' => '-lc++' }
-  # spec.user_target_xcconfig = {'OTHER_LDFLAGS' => '-lc++' }
+  spec.pod_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' ,
+      'OTHER_LDFLAGS' => '$(inherited) -force_load $(PODS_ROOT)/LinderaDetection/frameworks/MPPoseTracking.framework/MPPoseTracking' }
+  spec.user_target_xcconfig = {
+      'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' ,
+      'OTHER_LDFLAGS' => '$(inherited) -force_load $(PODS_ROOT)/LinderaDetection/frameworks/MPPoseTracking.framework/MPPoseTracking' }
   spec.libraries = 'stdc++'
-  # ――― Temporary Architecture fixes
-  spec.user_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' }
-  spec.pod_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' }
+  # spec.xcconfig = {
+  #   'FRAMEWORK_SEARCH_PATH[sdk=iphoneos*]' => '$(inherited) "$(PODS_ROOT)/frameworks"',
+  #   'OTHERCFLAGS[sdk=iphoneos*]' => '$(inherited) -iframework "$(PODS_ROOT)/frameworks"',
+  #   'OTHER_LDFLAGS[sdk=iphoneos*]' => '$(inherited) -framework frameworks'
+  # }
 end
mediapipe/swift/solutions/lindera/README.md | 13 (new file)
@@ -0,0 +1,13 @@
+## CocoaPods
+
+### Building Pod zipfile
+```shell
+bazel build -c opt --config=ios_fat --cxxopt=--std=c++17 --copt=-fembed-bitcode //mediapipe/swift/solutions/lindera:podgen
+```
+
+### Pushing Pods
+
+Here `clspecs` is the name of the pod specs repository.
+```shell
+pod repo push clspecs LinderaDetection.podspec --skip-import-validation
+```
third_party/opencv_ios.BUILD (vendored) | 6
@@ -10,6 +10,12 @@ load(
     "apple_static_framework_import",
 )

+filegroup(
+    name = "OpencvFrameworkContents",
+    srcs = glob(["opencv2.framework/**"]),
+    visibility = ["//visibility:public"],
+)
+
 apple_static_framework_import(
     name = "OpencvFramework",
     framework_imports = glob(["opencv2.framework/**"]),