Merge pull request #4254 from priankakariatyml:ios-gesture-recognizer

PiperOrigin-RevId: 523808915
This commit is contained in:
Copybara-Service 2023-04-12 14:40:31 -07:00
commit dd62b0831a
17 changed files with 811 additions and 0 deletions

View File

@ -54,3 +54,9 @@ objc_library(
"//third_party/apple_frameworks:UIKit", "//third_party/apple_frameworks:UIKit",
], ],
) )
# Container classes for 3D landmarks (MPPLandmark, MPPNormalizedLandmark).
objc_library(
name = "MPPLandmark",
srcs = ["sources/MPPLandmark.m"],
hdrs = ["sources/MPPLandmark.h"],
)

View File

@ -0,0 +1,126 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
 * Landmark represents a point in 3D space with x, y, z coordinates. The landmark coordinates are in
 * meters. z represents the landmark depth, and the smaller the value the closer the world landmark
 * is to the camera.
 */
NS_SWIFT_NAME(Landmark)
@interface MPPLandmark : NSObject
/** The x coordinate of the landmark. */
@property(nonatomic, readonly) float x;
/** The y coordinate of the landmark. */
@property(nonatomic, readonly) float y;
/** The z coordinate of the landmark. */
@property(nonatomic, readonly) float z;
/**
 * Landmark visibility. Should be `nil` if not supported. Float score of whether landmark is visible
 * or occluded by other objects. Landmark considered as invisible also if it is not present on the
 * screen (out of scene bounds). Depending on the model, visibility value is either a sigmoid or an
 * argument of sigmoid.
 */
@property(nonatomic, readonly, nullable) NSNumber *visibility;
/**
 * Landmark presence. Should stay unset if not supported. Float score of whether landmark is present
 * on the scene (located within scene bounds). Depending on the model, presence value is either a
 * result of sigmoid or an argument of sigmoid function to get landmark presence probability.
 */
@property(nonatomic, readonly, nullable) NSNumber *presence;
/**
 * Initializes a new `MPPLandmark` object with the given x, y and z coordinates, and the optional
 * visibility and presence scores.
 *
 * @param x The x coordinate of the landmark.
 * @param y The y coordinate of the landmark.
 * @param z The z coordinate of the landmark.
 * @param visibility The optional visibility score of the landmark. Pass `nil` if not supported.
 * @param presence The optional presence score of the landmark. Pass `nil` if not supported.
 *
 * @return An instance of `MPPLandmark` initialized with the given coordinates, visibility and
 * presence.
 */
- (instancetype)initWithX:(float)x
y:(float)y
z:(float)z
visibility:(nullable NSNumber *)visibility
presence:(nullable NSNumber *)presence NS_DESIGNATED_INITIALIZER;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
@end
/**
 * Normalized Landmark represents a point in 3D space with x, y, z coordinates. x and y are
 * normalized to [0.0, 1.0] by the image width and height respectively. z represents the landmark
 * depth, and the smaller the value the closer the landmark is to the camera. The magnitude of z
 * uses roughly the same scale as x.
 */
NS_SWIFT_NAME(NormalizedLandmark)
@interface MPPNormalizedLandmark : NSObject
/** The x coordinate of the landmark. */
@property(nonatomic, readonly) float x;
/** The y coordinate of the landmark. */
@property(nonatomic, readonly) float y;
/** The z coordinate of the landmark. */
@property(nonatomic, readonly) float z;
/**
 * Landmark visibility. Should be `nil` if not supported. Float score of whether landmark is visible
 * or occluded by other objects. Landmark considered as invisible also if it is not present on the
 * screen (out of scene bounds). Depending on the model, visibility value is either a sigmoid or an
 * argument of sigmoid.
 */
@property(nonatomic, readonly, nullable) NSNumber *visibility;
/**
 * Landmark presence. Should stay unset if not supported. Float score of whether landmark is present
 * on the scene (located within scene bounds). Depending on the model, presence value is either a
 * result of sigmoid or an argument of sigmoid function to get landmark presence probability.
 */
@property(nonatomic, readonly, nullable) NSNumber *presence;
/**
 * Initializes a new `MPPNormalizedLandmark` object with the given x, y and z coordinates, and the
 * optional visibility and presence scores.
 *
 * @param x The x coordinate of the landmark, normalized to [0.0, 1.0] by the image width.
 * @param y The y coordinate of the landmark, normalized to [0.0, 1.0] by the image height.
 * @param z The z coordinate of the landmark.
 * @param visibility The optional visibility score of the landmark. Pass `nil` if not supported.
 * @param presence The optional presence score of the landmark. Pass `nil` if not supported.
 *
 * @return An instance of `MPPNormalizedLandmark` initialized with the given coordinates, visibility
 * and presence.
 */
- (instancetype)initWithX:(float)x
y:(float)y
z:(float)z
visibility:(nullable NSNumber *)visibility
presence:(nullable NSNumber *)presence NS_DESIGNATED_INITIALIZER;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,105 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h"
static const float kFloatDifferenceTolerance = 1e-6f;

@implementation MPPLandmark

- (instancetype)initWithX:(float)x
                        y:(float)y
                        z:(float)z
               visibility:(NSNumber *)visibility
                 presence:(NSNumber *)presence {
  self = [super init];
  if (self) {
    _x = x;
    _y = y;
    _z = z;
    _visibility = visibility;
    _presence = presence;
  }
  return self;
}

- (NSUInteger)hash {
  // NOTE(review): hashes are derived from the exact float values while -isEqual: compares with a
  // tolerance, so two landmarks that compare equal may hash differently — confirm these objects
  // are never used as NSSet members / NSDictionary keys.
  NSUInteger combinedHash = @(self.x).hash;
  combinedHash ^= @(self.y).hash;
  combinedHash ^= @(self.z).hash;
  return combinedHash;
}

- (BOOL)isEqual:(nullable id)object {
  // Identity implies equality; messaging nil with -isKindOfClass: below yields NO, which also
  // covers the nil-argument case.
  if (self == object) {
    return YES;
  }
  if (![object isKindOfClass:[MPPLandmark class]]) {
    return NO;
  }
  // Coordinates are compared within a small tolerance; visibility and presence are intentionally
  // left out of the comparison, mirroring the original contract.
  MPPLandmark *other = (MPPLandmark *)object;
  BOOL xMatches = fabsf(other.x - self.x) < kFloatDifferenceTolerance;
  BOOL yMatches = fabsf(other.y - self.y) < kFloatDifferenceTolerance;
  BOOL zMatches = fabsf(other.z - self.z) < kFloatDifferenceTolerance;
  return xMatches && yMatches && zMatches;
}

@end
@implementation MPPNormalizedLandmark

- (instancetype)initWithX:(float)x
                        y:(float)y
                        z:(float)z
               visibility:(NSNumber *)visibility
                 presence:(NSNumber *)presence {
  self = [super init];
  if (self) {
    _x = x;
    _y = y;
    _z = z;
    _visibility = visibility;
    _presence = presence;
  }
  return self;
}

- (NSUInteger)hash {
  // NOTE(review): hashes are derived from the exact float values while -isEqual: compares with a
  // tolerance, so two landmarks that compare equal may hash differently — confirm these objects
  // are never used as NSSet members / NSDictionary keys.
  NSUInteger combinedHash = @(self.x).hash;
  combinedHash ^= @(self.y).hash;
  combinedHash ^= @(self.z).hash;
  return combinedHash;
}

- (BOOL)isEqual:(nullable id)object {
  // Identity implies equality; messaging nil with -isKindOfClass: below yields NO, which also
  // covers the nil-argument case.
  if (self == object) {
    return YES;
  }
  if (![object isKindOfClass:[MPPNormalizedLandmark class]]) {
    return NO;
  }
  // Coordinates are compared within a small tolerance; visibility and presence are intentionally
  // left out of the comparison, mirroring the original contract.
  MPPNormalizedLandmark *other = (MPPNormalizedLandmark *)object;
  BOOL xMatches = fabsf(other.x - self.x) < kFloatDifferenceTolerance;
  BOOL yMatches = fabsf(other.y - self.y) < kFloatDifferenceTolerance;
  BOOL zMatches = fabsf(other.z - self.z) < kFloatDifferenceTolerance;
  return xMatches && yMatches && zMatches;
}

@end

View File

@ -73,3 +73,14 @@ objc_library(
"//mediapipe/tasks/ios/components/containers:MPPDetection", "//mediapipe/tasks/ios/components/containers:MPPDetection",
], ],
) )
# Converters between the C++ landmark protos and the MPPLandmark container classes.
objc_library(
name = "MPPLandmarkHelpers",
srcs = ["sources/MPPLandmark+Helpers.mm"],
hdrs = ["sources/MPPLandmark+Helpers.h"],
deps = [
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/components/containers:MPPLandmark",
],
)

View File

@ -0,0 +1,33 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/framework/formats/landmark.pb.h"
#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h"
NS_ASSUME_NONNULL_BEGIN
@interface MPPLandmark (Helpers)
/**
 * Creates an `MPPLandmark` from the given `mediapipe::Landmark` proto.
 *
 * @param landmarkProto The `mediapipe::Landmark` proto to convert.
 *
 * @return An `MPPLandmark` whose fields are populated from the proto.
 */
+ (MPPLandmark *)landmarkWithProto:(const ::mediapipe::Landmark &)landmarkProto;
@end
@interface MPPNormalizedLandmark (Helpers)
/**
 * Creates an `MPPNormalizedLandmark` from the given `mediapipe::NormalizedLandmark` proto.
 *
 * @param normalizedLandmarkProto The `mediapipe::NormalizedLandmark` proto to convert.
 *
 * @return An `MPPNormalizedLandmark` whose fields are populated from the proto.
 */
+ (MPPNormalizedLandmark *)normalizedLandmarkWithProto:
(const ::mediapipe::NormalizedLandmark &)normalizedLandmarkProto;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,51 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
namespace {
using LandmarkProto = ::mediapipe::Landmark;
using NormalizedLandmarkProto = ::mediapipe::NormalizedLandmark;
} // namespace
@implementation MPPLandmark (Helpers)

/**
 * Creates an `MPPLandmark` from the given landmark proto. The optional `visibility` and `presence`
 * proto fields are mapped to `nil` when unset.
 */
// Use the file's `LandmarkProto` alias (declared in the anonymous namespace above); the original
// spelled out the fully-qualified type, leaving the alias unused.
+ (MPPLandmark *)landmarkWithProto:(const LandmarkProto &)landmarkProto {
  return [[MPPLandmark alloc]
       initWithX:landmarkProto.x()
               y:landmarkProto.y()
               z:landmarkProto.z()
      visibility:landmarkProto.has_visibility() ? @(landmarkProto.visibility()) : nil
        presence:landmarkProto.has_presence() ? @(landmarkProto.presence()) : nil];
}

@end
@implementation MPPNormalizedLandmark (Helpers)

/**
 * Creates an `MPPNormalizedLandmark` from the given normalized landmark proto. The optional
 * `visibility` and `presence` proto fields are mapped to `nil` when unset.
 */
// Use the file's `NormalizedLandmarkProto` alias (declared in the anonymous namespace above); the
// original spelled out the fully-qualified type, leaving the alias unused.
+ (MPPNormalizedLandmark *)normalizedLandmarkWithProto:
    (const NormalizedLandmarkProto &)normalizedLandmarkProto {
  return [[MPPNormalizedLandmark alloc]
       initWithX:normalizedLandmarkProto.x()
               y:normalizedLandmarkProto.y()
               z:normalizedLandmarkProto.z()
      visibility:normalizedLandmarkProto.has_visibility() ? @(normalizedLandmarkProto.visibility())
                                                          : nil
        presence:normalizedLandmarkProto.has_presence() ? @(normalizedLandmarkProto.presence())
                                                        : nil];
}

@end

View File

@ -0,0 +1,26 @@
# Copyright 2023 The MediaPipe Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package(default_visibility = [
"//mediapipe/tasks:internal",
"//mediapipe/tasks:users",
])
licenses(["notice"])
# Classifier options shared across the MediaPipe iOS classification tasks.
objc_library(
name = "MPPClassifierOptions",
srcs = ["sources/MPPClassifierOptions.m"],
hdrs = ["sources/MPPClassifierOptions.h"],
)

View File

@ -0,0 +1,57 @@
// Copyright 2023 The MediaPipe Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/** Classifier options shared across MediaPipe iOS classification tasks. */
NS_SWIFT_NAME(ClassifierOptions)
@interface MPPClassifierOptions : NSObject <NSCopying>
/**
 * The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults
 * to English.
 */
@property(nonatomic, copy, nullable) NSString *displayNamesLocale;
/**
 * The maximum number of top-scored classification results to return. If < 0, all available results
 * will be returned. If 0, an invalid argument error is returned.
 */
@property(nonatomic) NSInteger maxResults;
/**
 * Score threshold to override the one provided in the model metadata (if any). Results below this
 * value are rejected.
 */
@property(nonatomic) float scoreThreshold;
/**
 * The allowlist of category names. If non-empty, classification results whose category name is not
 * in this set will be filtered out. Duplicate or unknown category names are ignored. Mutually
 * exclusive with categoryDenylist.
 */
@property(nonatomic, copy, nullable) NSArray<NSString *> *categoryAllowlist;
/**
 * The denylist of category names. If non-empty, classification results whose category name is in
 * this set will be filtered out. Duplicate or unknown category names are ignored. Mutually
 * exclusive with categoryAllowlist.
 */
@property(nonatomic, copy, nullable) NSArray<NSString *> *categoryDenylist;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,40 @@
// Copyright 2023 The MediaPipe Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/components/processors/sources/MPPClassifierOptions.h"
@implementation MPPClassifierOptions

- (instancetype)init {
  self = [super init];
  if (self) {
    // -1 means "return all available results" (see header doc for maxResults).
    _maxResults = -1;
    _scoreThreshold = 0;
  }
  return self;
}

- (id)copyWithZone:(NSZone *)zone {
  // Allocate via `[self class]` (honoring the supplied zone) instead of hard-coding
  // MPPClassifierOptions, so that copying an instance of a subclass yields an object of the
  // subclass rather than silently truncating it to the base class.
  MPPClassifierOptions *classifierOptions = [[[self class] allocWithZone:zone] init];
  classifierOptions.displayNamesLocale = self.displayNamesLocale;
  classifierOptions.maxResults = self.maxResults;
  classifierOptions.scoreThreshold = self.scoreThreshold;
  classifierOptions.categoryAllowlist = self.categoryAllowlist;
  classifierOptions.categoryDenylist = self.categoryDenylist;
  return classifierOptions;
}

@end

View File

@ -0,0 +1,31 @@
# Copyright 2023 The MediaPipe Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package(default_visibility = [
"//mediapipe/tasks:internal",
"//mediapipe/tasks:users",
])
licenses(["notice"])
# Converter from MPPClassifierOptions to its C++ proto representation.
objc_library(
name = "MPPClassifierOptionsHelpers",
srcs = ["sources/MPPClassifierOptions+Helpers.mm"],
hdrs = ["sources/MPPClassifierOptions+Helpers.h"],
deps = [
"//mediapipe/tasks/cc/components/processors/proto:classifier_options_cc_proto",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/components/processors:MPPClassifierOptions",
],
)

View File

@ -0,0 +1,27 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/tasks/cc/components/processors/proto/classifier_options.pb.h"
#import "mediapipe/tasks/ios/components/processors/sources/MPPClassifierOptions.h"
NS_ASSUME_NONNULL_BEGIN
@interface MPPClassifierOptions (Helpers)
/**
 * Populates the given C++ `ClassifierOptions` proto with the values held by this
 * `MPPClassifierOptions` object. The proto is cleared before any fields are written.
 *
 * @param classifierOptionsProto The mutable proto to populate. Must not be null.
 */
- (void)copyToProto:
(mediapipe::tasks::components::processors::proto::ClassifierOptions *)classifierOptionsProto;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,44 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/components/processors/utils/sources/MPPClassifierOptions+Helpers.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
namespace {
using ClassifierOptionsProto = ::mediapipe::tasks::components::processors::proto::ClassifierOptions;
}
@implementation MPPClassifierOptions (Helpers)

/** Copies every configured option into the given proto, clearing it first. */
- (void)copyToProto:(ClassifierOptionsProto *)classifierOptionsProto {
  // Start from a clean proto so stale fields from a previous use never leak through.
  classifierOptionsProto->Clear();

  classifierOptionsProto->set_max_results((int)self.maxResults);
  classifierOptionsProto->set_score_threshold(self.scoreThreshold);

  // Optional string field: only set when a locale was provided.
  if (self.displayNamesLocale) {
    classifierOptionsProto->set_display_names_locale(self.displayNamesLocale.cppString);
  }

  // Fast enumeration over a nil array is a no-op, so no nil checks are needed here.
  for (NSString *categoryName in self.categoryAllowlist) {
    classifierOptionsProto->add_category_allowlist(categoryName.cppString);
  }
  for (NSString *categoryName in self.categoryDenylist) {
    classifierOptionsProto->add_category_denylist(categoryName.cppString);
  }
}

@end

View File

@ -0,0 +1,40 @@
# Copyright 2023 The MediaPipe Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"])
# Result container for the iOS gesture recognizer task.
objc_library(
name = "MPPGestureRecognizerResult",
srcs = ["sources/MPPGestureRecognizerResult.m"],
hdrs = ["sources/MPPGestureRecognizerResult.h"],
deps = [
"//mediapipe/tasks/ios/components/containers:MPPCategory",
"//mediapipe/tasks/ios/components/containers:MPPLandmark",
"//mediapipe/tasks/ios/core:MPPTaskResult",
],
)
# Configuration options for the iOS gesture recognizer task.
objc_library(
name = "MPPGestureRecognizerOptions",
srcs = ["sources/MPPGestureRecognizerOptions.m"],
hdrs = ["sources/MPPGestureRecognizerOptions.h"],
deps = [
":MPPGestureRecognizerResult",
"//mediapipe/tasks/ios/components/processors:MPPClassifierOptions",
"//mediapipe/tasks/ios/core:MPPTaskOptions",
"//mediapipe/tasks/ios/vision/core:MPPRunningMode",
],
)

View File

@ -0,0 +1,70 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/components/processors/sources/MPPClassifierOptions.h"
#import "mediapipe/tasks/ios/core/sources/MPPTaskOptions.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPRunningMode.h"
#import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h"
NS_ASSUME_NONNULL_BEGIN
/** Options for setting up a `MPPGestureRecognizer`. */
NS_SWIFT_NAME(GestureRecognizerOptions)
@interface MPPGestureRecognizerOptions : MPPTaskOptions <NSCopying>
/** The running mode of the gesture recognizer task. */
@property(nonatomic) MPPRunningMode runningMode;
/**
 * The user-defined result callback for processing live stream data. The result callback should only
 * be specified when the running mode is set to the live stream mode.
 * TODO: Add parameter `MPPImage` in the callback.
 */
@property(nonatomic, copy) void (^completion)
(MPPGestureRecognizerResult *result, NSInteger timestampMs, NSError *error);
/** The maximum number of hands that can be detected by the gesture recognizer. */
@property(nonatomic) NSInteger numHands;
/** The minimum confidence score for the hand detection to be considered successful. */
@property(nonatomic) float minHandDetectionConfidence;
/** The minimum confidence score of hand presence score in the hand landmark detection. */
@property(nonatomic) float minHandPresenceConfidence;
/** The minimum confidence score for the hand tracking to be considered successful. */
@property(nonatomic) float minTrackingConfidence;
/**
 * The optional `MPPClassifierOptions` controlling the canned gestures classifier, such as
 * score threshold, allow list and deny list of gestures. The categories for canned gesture
 * classifiers are: ["None", "Closed_Fist", "Open_Palm", "Pointing_Up", "Thumb_Down", "Thumb_Up",
 * "Victory", "ILoveYou"].
 *
 * TODO: Note this option is subject to change, after scoring merging calculator is implemented.
 */
@property(nonatomic, copy, nullable) MPPClassifierOptions *cannedGesturesClassifierOptions;
/**
 * The optional `MPPClassifierOptions` controlling the custom gestures classifier, such as
 * score threshold, allow list and deny list of gestures.
 *
 * TODO: Note this option is subject to change, after scoring merging calculator is implemented.
 */
@property(nonatomic, copy, nullable) MPPClassifierOptions *customGesturesClassifierOptions;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,45 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h"
@implementation MPPGestureRecognizerOptions

- (instancetype)init {
  self = [super init];
  if (self) {
    // Defaults: track a single hand with 0.5 confidence thresholds across the board.
    _numHands = 1;
    _minHandDetectionConfidence = 0.5f;
    _minHandPresenceConfidence = 0.5f;
    _minTrackingConfidence = 0.5f;
  }
  return self;
}

- (id)copyWithZone:(NSZone *)zone {
  // Let MPPTaskOptions produce the copy of the base configuration, then mirror every
  // gesture-recognizer-specific property onto the new instance.
  MPPGestureRecognizerOptions *optionsCopy = [super copyWithZone:zone];

  optionsCopy.runningMode = self.runningMode;
  optionsCopy.completion = self.completion;
  optionsCopy.numHands = self.numHands;
  optionsCopy.minHandDetectionConfidence = self.minHandDetectionConfidence;
  optionsCopy.minHandPresenceConfidence = self.minHandPresenceConfidence;
  optionsCopy.minTrackingConfidence = self.minTrackingConfidence;
  optionsCopy.cannedGesturesClassifierOptions = self.cannedGesturesClassifierOptions;
  optionsCopy.customGesturesClassifierOptions = self.customGesturesClassifierOptions;

  return optionsCopy;
}

@end

View File

@ -0,0 +1,65 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/components/containers/sources/MPPCategory.h"
#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h"
#import "mediapipe/tasks/ios/core/sources/MPPTaskResult.h"
NS_ASSUME_NONNULL_BEGIN
/** Represents the gesture recognition results generated by `MPPGestureRecognizer`. */
NS_SWIFT_NAME(GestureRecognizerResult)
@interface MPPGestureRecognizerResult : MPPTaskResult
/** Hand landmarks of detected hands. */
@property(nonatomic, readonly) NSArray<NSArray<MPPLandmark *> *> *landmarks;
/** Hand landmarks in world coordinates of detected hands. */
@property(nonatomic, readonly) NSArray<NSArray<MPPLandmark *> *> *worldLandmarks;
/** Handedness of detected hands. */
@property(nonatomic, readonly) NSArray<NSArray<MPPCategory *> *> *handedness;
/**
 * Recognized hand gestures of detected hands. Note that the index of the gesture is always -1,
 * because the raw indices from multiple gesture classifiers cannot consolidate to a meaningful
 * index.
 */
@property(nonatomic, readonly) NSArray<NSArray<MPPCategory *> *> *gestures;
/**
 * Initializes a new `MPPGestureRecognizerResult` with the given landmarks, world landmarks,
 * handedness, gestures and timestamp (in milliseconds).
 *
 * @param landmarks The hand landmarks of detected hands.
 * @param worldLandmarks The hand landmarks in world coordinates of detected hands.
 * @param handedness The handedness of detected hands.
 * @param gestures The recognized hand gestures of detected hands.
 * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
 *
 * @return An instance of `MPPGestureRecognizerResult` initialized with the given landmarks, world
 * landmarks, handedness and gestures.
 *
 */
- (instancetype)initWithLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)landmarks
worldLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)worldLandmarks
handedness:(NSArray<NSArray<MPPCategory *> *> *)handedness
gestures:(NSArray<NSArray<MPPCategory *> *> *)gestures
timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,34 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h"
@implementation MPPGestureRecognizerResult

- (instancetype)initWithLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)landmarks
                   worldLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)worldLandmarks
                       handedness:(NSArray<NSArray<MPPCategory *> *> *)handedness
                         gestures:(NSArray<NSArray<MPPCategory *> *> *)gestures
          timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
  self = [super initWithTimestampMs:timestampInMilliseconds];
  if (self) {
    // Copy the arrays so a caller that passes an NSMutableArray cannot mutate this result
    // after initialization — the properties are exposed as readonly immutable NSArrays.
    _landmarks = [landmarks copy];
    _worldLandmarks = [worldLandmarks copy];
    _handedness = [handedness copy];
    _gestures = [gestures copy];
  }
  return self;
}

@end