Merge pull request #4750 from priankakariatyml:ios-vision-task-runner-refactor-impl-part2

PiperOrigin-RevId: 562823970
Copybara-Service 2023-09-05 10:39:19 -07:00
commit a87613aa6c
10 changed files with 293 additions and 505 deletions
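All ten files migrate the iOS vision tasks (face landmarker, gesture recognizer, hand landmarker, object detector, plus variable renames in the face detector and image classifier) onto the refactored vision task runner target, MPPVisionTaskRunnerRefactored. The sketch below is only a reconstruction of the runner interface implied by the call sites in this diff; the superclass, parameter types, nullability annotations, and the PacketsCallback/PacketMap spellings are assumptions, not the shipped header.

// Hypothetical reconstruction from the call sites in this commit -- not the actual header.
@interface MPPVisionTaskRunner : NSObject  // superclass is an assumption

- (nullable instancetype)initWithTaskInfo:(MPPTaskInfo *)taskInfo
                              runningMode:(MPPRunningMode)runningMode
                               roiAllowed:(BOOL)roiAllowed
                          packetsCallback:(mediapipe::tasks::core::PacketsCallback)packetsCallback
                     imageInputStreamName:(NSString *)imageInputStreamName
                  normRectInputStreamName:(NSString *)normRectInputStreamName
                                    error:(NSError **)error;

// Image mode: builds the input packet map and runs the graph synchronously.
- (std::optional<mediapipe::tasks::core::PacketMap>)processImage:(MPPImage *)image
                                                            error:(NSError **)error;

// Video mode: same as above, but stamps the input packets with the given timestamp.
- (std::optional<mediapipe::tasks::core::PacketMap>)processVideoFrame:(MPPImage *)image
                                              timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                                error:(NSError **)error;

// Live-stream mode: sends the frame into the graph; results arrive via the packets callback.
- (BOOL)processLiveStreamImage:(MPPImage *)image
       timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                         error:(NSError **)error;

@end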


@@ -179,13 +179,13 @@ static NSString *const kTaskName = @"faceDetector";
   MPPFaceDetectorResult *result = FaceDetectorResultWithOutputPacketMap(liveStreamResult.value());

-  NSInteger timeStampInMilliseconds =
+  NSInteger timestampInMilliseconds =
       outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
       kMicrosecondsPerMillisecond;

   dispatch_async(_callbackQueue, ^{
     [self.faceDetectorLiveStreamDelegate faceDetector:self
                       didFinishDetectionWithResult:result
-                           timestampInMilliseconds:timeStampInMilliseconds
+                           timestampInMilliseconds:timestampInMilliseconds
                                              error:callbackError];
   });
 }


@@ -70,7 +70,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerOptionsHelpers",
         "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerResultHelpers",
     ],


@@ -19,7 +19,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h"
@@ -56,6 +56,13 @@ static NSString *const kTaskName = @"faceLandmarker";
     }  \
   }

+#define FaceLandmarkerResultWithOutputPacketMap(outputPacketMap)  \
+  ([MPPFaceLandmarkerResult  \
+      faceLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString]  \
+                            blendshapesPacket:outputPacketMap[kBlendshapesOutStreamName.cppString]  \
+                 transformationMatrixesPacket:outputPacketMap[kFaceGeometryOutStreamName  \
+                                                                  .cppString]])
+
 @interface MPPFaceLandmarker () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -71,6 +78,8 @@ static NSString *const kTaskName = @"faceLandmarker";

 @implementation MPPFaceLandmarker

+#pragma mark - Public
+
 - (instancetype)initWithOptions:(MPPFaceLandmarkerOptions *)options error:(NSError **)error {
   self = [super init];
   if (self) {
@@ -124,12 +133,13 @@ static NSString *const kTaskName = @"faceLandmarker";
     };
   }

-    _visionTaskRunner =
-        [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                       runningMode:options.runningMode
-                                                   packetsCallback:std::move(packetsCallback)
-                                                             error:error];
+    _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                          runningMode:options.runningMode
+                                                           roiAllowed:NO
+                                                      packetsCallback:std::move(packetsCallback)
+                                                 imageInputStreamName:kImageInStreamName
+                                              normRectInputStreamName:kNormRectStreamName
+                                                                error:error];
     if (!_visionTaskRunner) {
       return nil;
     }
@@ -144,138 +154,29 @@ static NSString *const kTaskName = @"faceLandmarker";
   return [self initWithOptions:options error:error];
 }

-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:*rect
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
 - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:*rect];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceLandmarkerResult
-      faceLandmarkerResultWithLandmarksPacket:outputPacketMap
-                                                  .value()[kLandmarksOutStreamName.cppString]
-                            blendshapesPacket:outputPacketMap
-                                                  .value()[kBlendshapesOutStreamName.cppString]
-                 transformationMatrixesPacket:outputPacketMap
-                                                  .value()[kFaceGeometryOutStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPFaceLandmarker faceLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:*inputPacketMap error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceLandmarkerResult
-      faceLandmarkerResultWithLandmarksPacket:outputPacketMap
-                                                  .value()[kLandmarksOutStreamName.cppString]
-                            blendshapesPacket:outputPacketMap
-                                                  .value()[kBlendshapesOutStreamName.cppString]
-                 transformationMatrixesPacket:outputPacketMap
-                                                  .value()[kFaceGeometryOutStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPFaceLandmarker faceLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:*inputPacketMap error:error];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  NSError *callbackError;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
-                            didFinishDetectionWithResult:nil
-                                 timestampInMilliseconds:Timestamp::Unset().Value()
-                                                   error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = *liveStreamResult;
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    // The graph did not return a result. We therefore do not raise the user callback. This mirrors
-    // returning `nil` in the other methods and is acceptable for the live stream delegate since
-    // it is expected that we drop frames and don't return results for every input.
-    return;
-  }
-
-  MPPFaceLandmarkerResult *result = [MPPFaceLandmarkerResult
-      faceLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString]
-                            blendshapesPacket:outputPacketMap[kBlendshapesOutStreamName.cppString]
-                 transformationMatrixesPacket:outputPacketMap[kFaceGeometryOutStreamName
-                                                                  .cppString]];
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicrosecondsPerMillisecond;
-
-  dispatch_async(_callbackQueue, ^{
-    [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
-                          didFinishDetectionWithResult:result
-                               timestampInMilliseconds:timeStampInMilliseconds
-                                                 error:callbackError];
-  });
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }

 + (NSArray<MPPConnection *> *)lipsConnections {
@@ -322,4 +223,48 @@ static NSString *const kTaskName = @"faceLandmarker";
   return MPPFaceConnections;
 }

+#pragma mark - Private
+
++ (nullable MPPFaceLandmarkerResult *)faceLandmarkerResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap>)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return FaceLandmarkerResultWithOutputPacketMap(outputPacketMap.value());
+}
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  NSError *callbackError;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
+                            didFinishDetectionWithResult:nil
+                                 timestampInMilliseconds:Timestamp::Unset().Value()
+                                                   error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = *liveStreamResult;
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    // The graph did not return a result. We therefore do not raise the user callback. This mirrors
+    // returning `nil` in the other methods and is acceptable for the live stream delegate since
+    // it is expected that we drop frames and don't return results for every input.
+    return;
+  }
+
+  MPPFaceLandmarkerResult *result = FaceLandmarkerResultWithOutputPacketMap(outputPacketMap);
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicrosecondsPerMillisecond;
+
+  dispatch_async(_callbackQueue, ^{
+    [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
+                          didFinishDetectionWithResult:result
+                               timestampInMilliseconds:timestampInMilliseconds
+                                                 error:callbackError];
+  });
+}
+
 @end
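Each task used to build its own input PacketMap (the deleted inputPacketMapWithMPPImage: and friends in this file) before handing it to processImagePacketMap:/processVideoFramePacketMap:/processLiveStreamPacketMap:. With this change that plumbing presumably moves behind processImage:, processVideoFrame: and processLiveStreamImage: in the refactored runner. A hedged sketch of what the image path could look like inside the runner, reusing the same packet-creation calls; the ivar names and the internal call to processImagePacketMap: are assumptions, not code from this commit.

// Sketch only -- assumes the runner keeps the packet construction the tasks used to do
// and that the old processImagePacketMap: entry point still exists internally.
- (std::optional<PacketMap>)processImage:(MPPImage *)image error:(NSError **)error {
  std::optional<NormalizedRect> rect =
      [self normalizedRectWithImageOrientation:image.orientation
                                     imageSize:CGSizeMake(image.width, image.height)
                                         error:error];
  if (!rect.has_value()) {
    return std::nullopt;
  }

  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
  if (imagePacket.IsEmpty()) {
    return std::nullopt;
  }

  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:*rect];

  // _imageInStreamName / _normRectInStreamName stand in for whatever the runner stores from
  // initWithTaskInfo:...imageInputStreamName:normRectInputStreamName: (hypothetical ivars).
  PacketMap inputPacketMap;
  inputPacketMap[_imageInStreamName.cppString] = imagePacket;
  inputPacketMap[_normRectInStreamName.cppString] = normalizedRectPacket;

  return [self processImagePacketMap:inputPacketMap error:error];
}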


@@ -58,7 +58,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerOptionsHelpers",
         "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerResultHelpers",
     ],


@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h"
@@ -54,6 +54,17 @@ static NSString *const kTaskName = @"gestureRecognizer";
     }  \
   }

+#define GestureRecognizerResultWithOutputPacketMap(outputPacketMap)  \
+  ([MPPGestureRecognizerResult  \
+      gestureRecognizerResultWithHandGesturesPacket:outputPacketMap[kHandGesturesOutStreamName  \
+                                                                        .cppString]  \
+                                   handednessPacket:outputPacketMap[kHandednessOutStreamName  \
+                                                                        .cppString]  \
+                                handLandmarksPacket:outputPacketMap[kLandmarksOutStreamName  \
+                                                                        .cppString]  \
+                               worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName  \
+                                                                        .cppString]])
+
 @interface MPPGestureRecognizer () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -65,56 +76,6 @@ static NSString *const kTaskName = @"gestureRecognizer";

 @implementation MPPGestureRecognizer

-- (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOutputPacketMap:
-    (PacketMap &)outputPacketMap {
-  return [MPPGestureRecognizerResult
-      gestureRecognizerResultWithHandGesturesPacket:outputPacketMap[kHandGesturesOutStreamName
-                                                                        .cppString]
-                                   handednessPacket:outputPacketMap[kHandednessOutStreamName
-                                                                        .cppString]
-                                handLandmarksPacket:outputPacketMap[kLandmarksOutStreamName
-                                                                        .cppString]
-                               worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName
-                                                                        .cppString]];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.gestureRecognizerLiveStreamDelegate
-          respondsToSelector:@selector(gestureRecognizer:
-                                 didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
-                                    didFinishRecognitionWithResult:nil
-                                           timestampInMilliseconds:Timestamp::Unset().Value()
-                                                             error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPGestureRecognizerResult *result =
-      [self gestureRecognizerResultWithOutputPacketMap:outputPacketMap];
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-
-  dispatch_async(_callbackQueue, ^{
-    [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
-                                  didFinishRecognitionWithResult:result
-                                         timestampInMilliseconds:timeStampInMilliseconds
-                                                           error:callbackError];
-  });
-}
-
 - (instancetype)initWithOptions:(MPPGestureRecognizerOptions *)options error:(NSError **)error {
   self = [super init];
   if (self) {
@@ -161,10 +122,12 @@ static NSString *const kTaskName = @"gestureRecognizer";
     };
   }

-    _visionTaskRunner =
-        [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                       runningMode:options.runningMode
-                                                   packetsCallback:std::move(packetsCallback)
-                                                             error:error];
+    _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                          runningMode:options.runningMode
+                                                           roiAllowed:NO
+                                                      packetsCallback:std::move(packetsCallback)
+                                                 imageInputStreamName:kImageInStreamName
+                                              normRectInputStreamName:kNormRectInStreamName
+                                                                error:error];
     if (!_visionTaskRunner) {
       return nil;
@@ -181,93 +144,76 @@ static NSString *const kTaskName = @"gestureRecognizer";
   return [self initWithOptions:options error:error];
 }

-- (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOptionalOutputPacketMap:
-    (std::optional<PacketMap> &)outputPacketMap {
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  MPPGestureRecognizerResult *result =
-      [self gestureRecognizerResultWithOutputPacketMap:outputPacketMap.value()];
-
-  return result;
-}
-
 - (nullable MPPGestureRecognizerResult *)recognizeImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-
-  return [self gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
-}
-
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPGestureRecognizer gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (nullable MPPGestureRecognizerResult *)recognizeVideoFrame:(MPPImage *)image
                                       timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                         error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
-
-  return [self gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPGestureRecognizer gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (BOOL)recognizeAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
+}
+
+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.gestureRecognizerLiveStreamDelegate
+          respondsToSelector:@selector(gestureRecognizer:
+                                 didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
+                                    didFinishRecognitionWithResult:nil
+                                           timestampInMilliseconds:Timestamp::Unset().Value()
+                                                             error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPGestureRecognizerResult *result = GestureRecognizerResultWithOutputPacketMap(outputPacketMap);
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+
+  dispatch_async(_callbackQueue, ^{
+    [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
+                                  didFinishRecognitionWithResult:result
+                                         timestampInMilliseconds:timestampInMilliseconds
+                                                           error:callbackError];
+  });
+}
+
++ (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap> &)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return GestureRecognizerResultWithOutputPacketMap(outputPacketMap.value());
 }

 @end


@@ -66,7 +66,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerOptionsHelpers",
         "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerResultHelpers",
     ],


@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h"
 #import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h"
@@ -53,6 +53,14 @@ static NSString *const kTaskName = @"handLandmarker";
     }  \
   }

+#define HandLandmarkerResultWithOutputPacketMap(outputPacketMap)  \
+  ([MPPHandLandmarkerResult  \
+      handLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString]  \
+                         worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName  \
+                                                                  .cppString]  \
+                             handednessPacket:outputPacketMap[kHandednessOutStreamName  \
+                                                                  .cppString]])
+
 @interface MPPHandLandmarker () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -63,50 +71,7 @@ static NSString *const kTaskName = @"handLandmarker";

 @implementation MPPHandLandmarker

-- (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOutputPacketMap:
-    (PacketMap &)outputPacketMap {
-  return [MPPHandLandmarkerResult
-      handLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString]
-                         worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName
-                                                                  .cppString]
-                             handednessPacket:outputPacketMap[kHandednessOutStreamName.cppString]];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.handLandmarkerLiveStreamDelegate
-          respondsToSelector:@selector(handLandmarker:
-                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.handLandmarkerLiveStreamDelegate handLandmarker:self
-                               didFinishDetectionWithResult:nil
-                                    timestampInMilliseconds:Timestamp::Unset().Value()
-                                                      error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPHandLandmarkerResult *result = [self handLandmarkerResultWithOutputPacketMap:outputPacketMap];
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-
-  dispatch_async(_callbackQueue, ^{
-    [self.handLandmarkerLiveStreamDelegate handLandmarker:self
-                              didFinishDetectionWithResult:result
-                                   timestampInMilliseconds:timeStampInMilliseconds
-                                                     error:callbackError];
-  });
-}
+#pragma mark - Public

 - (instancetype)initWithOptions:(MPPHandLandmarkerOptions *)options error:(NSError **)error {
   self = [super init];
@@ -152,11 +117,14 @@ static NSString *const kTaskName = @"handLandmarker";
     };
   }

-    _visionTaskRunner =
-        [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                       runningMode:options.runningMode
-                                                   packetsCallback:std::move(packetsCallback)
-                                                             error:error];
+    _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                          runningMode:options.runningMode
+                                                           roiAllowed:NO
+                                                      packetsCallback:std::move(packetsCallback)
+                                                 imageInputStreamName:kImageInStreamName
+                                              normRectInputStreamName:kNormRectInStreamName
+                                                                error:error];
     if (!_visionTaskRunner) {
       return nil;
     }
@@ -172,93 +140,29 @@ static NSString *const kTaskName = @"handLandmarker";
   return [self initWithOptions:options error:error];
 }

-- (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOptionalOutputPacketMap:
-    (std::optional<PacketMap> &)outputPacketMap {
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  MPPHandLandmarkerResult *result =
-      [self handLandmarkerResultWithOutputPacketMap:outputPacketMap.value()];
-
-  return result;
-}
-
 - (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-
-  return [self handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
-}
-
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPHandLandmarker handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
-
-  return [self handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPHandLandmarker handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }

 + (NSArray<MPPConnection *> *)handPalmConnections {
@@ -285,4 +189,51 @@ static NSString *const kTaskName = @"handLandmarker";
   return MPPHandConnections;
 }

+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.handLandmarkerLiveStreamDelegate
+          respondsToSelector:@selector(handLandmarker:
+                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.handLandmarkerLiveStreamDelegate handLandmarker:self
+                               didFinishDetectionWithResult:nil
+                                    timestampInMilliseconds:Timestamp::Unset().Value()
+                                                      error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPHandLandmarkerResult *result = HandLandmarkerResultWithOutputPacketMap(outputPacketMap);
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+
+  dispatch_async(_callbackQueue, ^{
+    [self.handLandmarkerLiveStreamDelegate handLandmarker:self
+                              didFinishDetectionWithResult:result
+                                   timestampInMilliseconds:timestampInMilliseconds
+                                                     error:callbackError];
+  });
+}
+
++ (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap> &)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return HandLandmarkerResultWithOutputPacketMap(outputPacketMap.value());
+}
+
 @end


@@ -219,13 +219,13 @@ static const int kMicroSecondsPerMilliSecond = 1000;
   MPPImageClassifierResult *result = ImageClassifierResultWithOutputPacketMap(outputPacketMap);

-  NSInteger timeStampInMilliseconds =
+  NSInteger timestampInMilliseconds =
       outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
       kMicroSecondsPerMilliSecond;

   dispatch_async(_callbackQueue, ^{
     [self.imageClassifierLiveStreamDelegate imageClassifier:self
                         didFinishClassificationWithResult:result
-                                   timestampInMilliseconds:timeStampInMilliseconds
+                                   timestampInMilliseconds:timestampInMilliseconds
                                                       error:callbackError];
   });
 }


@@ -55,7 +55,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/object_detector/utils:MPPObjectDetectorOptionsHelpers",
         "//mediapipe/tasks/ios/vision/object_detector/utils:MPPObjectDetectorResultHelpers",
     ],


@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.h"
@@ -47,6 +47,10 @@ static NSString *const kTaskName = @"objectDetector";
     }  \
   }

+#define ObjectDetectorResultWithOutputPacketMap(outputPacketMap)  \
+  ([MPPObjectDetectorResult  \
+      objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]])
+
 @interface MPPObjectDetector () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -57,42 +61,7 @@ static NSString *const kTaskName = @"objectDetector";

 @implementation MPPObjectDetector

-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.objectDetectorLiveStreamDelegate
-          respondsToSelector:@selector(objectDetector:
-                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.objectDetectorLiveStreamDelegate objectDetector:self
-                               didFinishDetectionWithResult:nil
-                                    timestampInMilliseconds:Timestamp::Unset().Value()
-                                                      error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPObjectDetectorResult *result = [MPPObjectDetectorResult
-      objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]];
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-
-  dispatch_async(_callbackQueue, ^{
-    [self.objectDetectorLiveStreamDelegate objectDetector:self
-                              didFinishDetectionWithResult:result
-                                   timestampInMilliseconds:timeStampInMilliseconds
-                                                     error:callbackError];
-  });
-}
+#pragma mark - Public

 - (instancetype)initWithOptions:(MPPObjectDetectorOptions *)options error:(NSError **)error {
   self = [super init];
@@ -135,10 +104,12 @@ static NSString *const kTaskName = @"objectDetector";
     };
   }

-    _visionTaskRunner =
-        [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                       runningMode:options.runningMode
-                                                   packetsCallback:std::move(packetsCallback)
-                                                             error:error];
+    _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                          runningMode:options.runningMode
+                                                           roiAllowed:NO
+                                                      packetsCallback:std::move(packetsCallback)
+                                                 imageInputStreamName:kImageInStreamName
+                                              normRectInputStreamName:kNormRectStreamName
+                                                                error:error];

     if (!_visionTaskRunner) {
@@ -157,101 +128,76 @@ static NSString *const kTaskName = @"objectDetector";
   return [self initWithOptions:options error:error];
 }

-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
-- (nullable MPPObjectDetectorResult *)detectInImage:(MPPImage *)image
-                                    regionOfInterest:(CGRect)roi
-                                               error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPObjectDetectorResult
-      objectDetectorResultWithDetectionsPacket:outputPacketMap
-                                                   .value()[kDetectionsStreamName.cppString]];
-}
-
 - (nullable MPPObjectDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  return [self detectInImage:image regionOfInterest:CGRectZero error:error];
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPObjectDetector objectDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (nullable MPPObjectDetectorResult *)detectInVideoFrame:(MPPImage *)image
                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPObjectDetectorResult
-      objectDetectorResultWithDetectionsPacket:outputPacketMap
-                                                   .value()[kDetectionsStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPObjectDetector objectDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
+}
+
+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.objectDetectorLiveStreamDelegate
+          respondsToSelector:@selector(objectDetector:
+                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.objectDetectorLiveStreamDelegate objectDetector:self
+                               didFinishDetectionWithResult:nil
+                                    timestampInMilliseconds:Timestamp::Unset().Value()
+                                                      error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPObjectDetectorResult *result = ObjectDetectorResultWithOutputPacketMap(outputPacketMap);
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+
+  dispatch_async(_callbackQueue, ^{
+    [self.objectDetectorLiveStreamDelegate objectDetector:self
+                              didFinishDetectionWithResult:result
+                                   timestampInMilliseconds:timestampInMilliseconds
+                                                     error:callbackError];
+  });
+}
+
++ (nullable MPPObjectDetectorResult *)objectDetectorResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap> &)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return ObjectDetectorResultWithOutputPacketMap(outputPacketMap.value());
 }

 @end
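The detectInImage:/detectInVideoFrame:/detectAsyncInImage: signatures shown above keep their shapes across the refactor (the object detector's internal regionOfInterest: variant is dropped), so call sites continue to work unchanged. A minimal usage sketch; `detector` and `image` are assumed to be a configured MPPObjectDetector (created via initWithOptions:error:) and an MPPImage, and the timestamps are illustrative.

// Sketch of the unchanged call surface; setup of the detector and image is elided.
static void RunDetectorOnce(MPPObjectDetector *detector, MPPImage *image) {
  NSError *error = nil;

  // Image mode.
  MPPObjectDetectorResult *result = [detector detectInImage:image error:&error];
  (void)result;

  // Video mode: callers pass a monotonically increasing frame timestamp.
  MPPObjectDetectorResult *frameResult = [detector detectInVideoFrame:image
                                               timestampInMilliseconds:33
                                                                 error:&error];
  (void)frameResult;

  // Live-stream mode: returns immediately; the result reaches the
  // objectDetectorLiveStreamDelegate via processLiveStreamResult: on the callback queue.
  BOOL sent = [detector detectAsyncInImage:image timestampInMilliseconds:66 error:&error];
  (void)sent;
}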