From e7a0ed84e69b10e7ba0eed06d64ebf49c4c60357 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Thu, 31 Aug 2023 17:37:15 +0530
Subject: [PATCH] Updated iOS face detector to use refactored vision task
 runner

---
 .../tasks/ios/vision/face_detector/BUILD      |   2 +-
 .../face_detector/sources/MPPFaceDetector.mm  | 117 +++++-------------
 2 files changed, 34 insertions(+), 85 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/face_detector/BUILD b/mediapipe/tasks/ios/vision/face_detector/BUILD
index e4fc15616..eb34da1b6 100644
--- a/mediapipe/tasks/ios/vision/face_detector/BUILD
+++ b/mediapipe/tasks/ios/vision/face_detector/BUILD
@@ -55,7 +55,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/face_detector/utils:MPPFaceDetectorOptionsHelpers",
         "//mediapipe/tasks/ios/vision/face_detector/utils:MPPFaceDetectorResultHelpers",
     ],
diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm
index 7cb525fb0..6ddc9dbff 100644
--- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm
+++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm
@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/face_detector/utils/sources/MPPFaceDetectorOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/face_detector/utils/sources/MPPFaceDetectorResult+Helpers.h"
 
@@ -49,6 +49,12 @@ static NSString *const kTaskName = @"faceDetector";
     }                                                                                             \
   }
+#define FaceDetectorResultWithOutputPacketMap(outputPacketMap)                                    \
+  {                                                                                               \
+    [MPPFaceDetectorResult                                                                        \
+        faceDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]]  \
+  }
+
 @interface MPPFaceDetector () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
 }
@@ -102,11 +108,13 @@ static NSString *const kTaskName = @"faceDetector";
       };
     }
 
-    _visionTaskRunner =
-        [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                       runningMode:options.runningMode
-                                                   packetsCallback:std::move(packetsCallback)
-                                                             error:error];
+    _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                          runningMode:options.runningMode
+                                                           roiAllowed:NO
+                                                      packetsCallback:std::move(packetsCallback)
+                                                 imageInputStreamName:kImageInStreamName
+                                              normRectInputStreamName:kNormRectStreamName
+                                                                error:error];
     if (!_visionTaskRunner) {
       return nil;
     }
@@ -124,95 +132,29 @@ static NSString *const kTaskName = @"faceDetector";
   return [self initWithOptions:options error:error];
 }
 
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                  error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
 - (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
 
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceDetectorResult
-      faceDetectorResultWithDetectionsPacket:outputPacketMap
-                                                 .value()[kDetectionsStreamName.cppString]];
+  return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                  error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
   std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
 
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceDetectorResult
-      faceDetectorResultWithDetectionsPacket:outputPacketMap
-                                                 .value()[kDetectionsStreamName.cppString]];
+  return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }
 
 - (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
@@ -237,9 +179,7 @@ static NSString *const kTaskName = @"faceDetector";
     return;
   }
 
-  MPPFaceDetectorResult *result = [MPPFaceDetectorResult
-      faceDetectorResultWithDetectionsPacket:liveStreamResult
-                                                 .value()[kDetectionsStreamName.cppString]];
+  MPPFaceDetectorResult *result = FaceDetectorResultWithOutputPacketMap(liveStreamResult.value());
 
   NSInteger timeStampInMilliseconds =
       outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
@@ -252,4 +192,13 @@
   });
 }
 
++ (nullable MPPFaceDetectorResult *)faceDetectorResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap>)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return FaceDetectorResultWithOutputPacketMap(outputPacketMap.value());
+}
+
 @end
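
For context, a minimal usage sketch of the image-mode path that this patch reroutes through the refactored MPPVisionTaskRunner. It is illustrative only and not part of the change; the helper name, model path, option values, and input image below are assumptions.

// Illustrative sketch only -- not part of this patch. Assumes a bundled
// face detection .tflite model and an MPPImage constructed elsewhere.
#import "mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h"

static MPPFaceDetectorResult *RunFaceDetection(MPPImage *image, NSString *modelPath) {
  MPPFaceDetectorOptions *options = [[MPPFaceDetectorOptions alloc] init];
  options.baseOptions.modelAssetPath = modelPath;
  options.runningMode = MPPRunningModeImage;  // synchronous, single-image mode

  NSError *error = nil;
  MPPFaceDetector *detector = [[MPPFaceDetector alloc] initWithOptions:options error:&error];
  if (!detector) {
    NSLog(@"Failed to create MPPFaceDetector: %@", error);
    return nil;
  }

  // detectInImage:error: now builds packets and invokes the graph through the
  // refactored runner's processImage:error: instead of a locally built packet map.
  return [detector detectInImage:image error:&error];
}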