Updated iOS gesture recognizer to use refactored vision task runner
This commit is contained in:
parent fe9c7a47e9
commit 020ca5eb77
@@ -58,7 +58,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
        "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerOptionsHelpers",
        "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerResultHelpers",
    ],
@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h"
 
@@ -54,6 +54,19 @@ static NSString *const kTaskName = @"gestureRecognizer";
   } \
 }
 
+#define GestureRecognizerResultWithOutputPacketMap(outputPacketMap)                               \
+  {                                                                                               \
+    [MPPGestureRecognizerResult                                                                   \
+        gestureRecognizerResultWithHandGesturesPacket:outputPacketMap[kHandGesturesOutStreamName  \
+                                                                          .cppString]             \
+                                     handednessPacket:outputPacketMap[kHandednessOutStreamName    \
+                                                                          .cppString]             \
+                                  handLandmarksPacket:outputPacketMap[kLandmarksOutStreamName     \
+                                                                          .cppString]             \
+                                 worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName \
+                                                                          .cppString]]            \
+  }
+
 @interface MPPGestureRecognizer () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -65,56 +78,6 @@ static NSString *const kTaskName = @"gestureRecognizer";
 
 @implementation MPPGestureRecognizer
 
-- (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOutputPacketMap:
-    (PacketMap &)outputPacketMap {
-  return [MPPGestureRecognizerResult
-      gestureRecognizerResultWithHandGesturesPacket:outputPacketMap[kHandGesturesOutStreamName
-                                                        .cppString]
-                                   handednessPacket:outputPacketMap[kHandednessOutStreamName
-                                                        .cppString]
-                                handLandmarksPacket:outputPacketMap[kLandmarksOutStreamName
-                                                        .cppString]
-                               worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName
-                                                        .cppString]];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.gestureRecognizerLiveStreamDelegate
-          respondsToSelector:@selector(gestureRecognizer:
-                                 didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
-                                   didFinishRecognitionWithResult:nil
-                                          timestampInMilliseconds:Timestamp::Unset().Value()
-                                                            error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPGestureRecognizerResult *result =
-      [self gestureRecognizerResultWithOutputPacketMap:outputPacketMap];
-
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-  dispatch_async(_callbackQueue, ^{
-    [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
-                                 didFinishRecognitionWithResult:result
-                                        timestampInMilliseconds:timeStampInMilliseconds
-                                                          error:callbackError];
-  });
-}
-
 - (instancetype)initWithOptions:(MPPGestureRecognizerOptions *)options error:(NSError **)error {
   self = [super init];
   if (self) {
@@ -161,10 +124,12 @@ static NSString *const kTaskName = @"gestureRecognizer";
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
-                                                 packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:NO
+                                                    packetsCallback:std::move(packetsCallback)
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectInStreamName
+                                                              error:error];
   if (!_visionTaskRunner) {
     return nil;
@@ -181,93 +146,76 @@ static NSString *const kTaskName = @"gestureRecognizer";
   return [self initWithOptions:options error:error];
 }
 
-- (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOptionalOutputPacketMap:
-    (std::optional<PacketMap> &)outputPacketMap {
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-  MPPGestureRecognizerResult *result =
-      [self gestureRecognizerResultWithOutputPacketMap:outputPacketMap.value()];
-  return result;
-}
-
 - (nullable MPPGestureRecognizerResult *)recognizeImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  return [self gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
-}
-
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPGestureRecognizer gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPGestureRecognizerResult *)recognizeVideoFrame:(MPPImage *)image
                                      timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                        error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
-
-  return [self gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPGestureRecognizer gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (BOOL)recognizeAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
+}
+
+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.gestureRecognizerLiveStreamDelegate
+          respondsToSelector:@selector(gestureRecognizer:
+                                 didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
+                                   didFinishRecognitionWithResult:nil
+                                          timestampInMilliseconds:Timestamp::Unset().Value()
+                                                            error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPGestureRecognizerResult *result = GestureRecognizerResultWithOutputPacketMap(outputPacketMap);
+
+  NSInteger timeStampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+  dispatch_async(_callbackQueue, ^{
+    [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
+                                 didFinishRecognitionWithResult:result
+                                        timestampInMilliseconds:timeStampInMilliseconds
+                                                          error:callbackError];
+  });
+}
+
++ (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap> &)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return GestureRecognizerResultWithOutputPacketMap(outputPacketMap.value());
 }
 
 @end
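For context, a minimal caller-side sketch of the entry points whose bodies this commit simplifies (initWithOptions:error: and recognizeImage:error:). This is illustrative only and assumes the surrounding MediaPipe Tasks iOS API; the import path, the baseOptions/running-mode property names, and the modelPath and image variables are assumptions for the sketch, not part of the commit.

// Illustrative sketch only; not part of this commit.
// Header path assumed from the sources/ layout visible in the imports above.
#import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h"

MPPGestureRecognizerOptions *options = [[MPPGestureRecognizerOptions alloc] init];
options.baseOptions.modelAssetPath = modelPath;  // placeholder path to the model asset
options.runningMode = MPPRunningModeImage;       // IMAGE mode uses processImage:error: above

NSError *error = nil;
MPPGestureRecognizer *recognizer = [[MPPGestureRecognizer alloc] initWithOptions:options
                                                                            error:&error];
// recognizeImage:error: now delegates to the refactored
// -[MPPVisionTaskRunner processImage:error:] shown in the last hunk.
MPPGestureRecognizerResult *result = [recognizer recognizeImage:image error:&error];

The VIDEO and LIVE_STREAM paths follow the same pattern through recognizeVideoFrame:timestampInMilliseconds:error: and recognizeAsyncImage:timestampInMilliseconds:error:, which now call processVideoFrame:... and processLiveStreamImage:... on the refactored runner.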