From 69017381af8d647b14242320e3a2c208c2b42d8f Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 24 May 2023 19:57:38 +0530
Subject: [PATCH 1/4] Updated MPPObjectDetectorResult Helpers to return empty
 result instead of nil

---
 .../utils/sources/MPPObjectDetectorResult+Helpers.mm | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm b/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm
index b2f9cfc08..3a8a72f71 100644
--- a/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm
+++ b/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm
@@ -25,8 +25,12 @@ using ::mediapipe::Packet;
 
 + (nullable MPPObjectDetectorResult *)objectDetectorResultWithDetectionsPacket:
     (const Packet &)packet {
+
+  NSInteger timestampInMilliseconds = (NSInteger)(packet.Timestamp().Value() /
+                                                  kMicroSecondsPerMilliSecond);
   if (!packet.ValidateAsType<std::vector<DetectionProto>>().ok()) {
-    return nil;
+    return [[MPPObjectDetectorResult alloc] initWithDetections:@[]
+                                        timestampInMilliseconds:timestampInMilliseconds];
   }
 
   const std::vector<DetectionProto> &detectionProtos = packet.Get<std::vector<DetectionProto>>();
@@ -39,8 +43,7 @@ using ::mediapipe::Packet;
   }
 
   return [[MPPObjectDetectorResult alloc] initWithDetections:detections
-                                     timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
-                                                                  kMicroSecondsPerMilliSecond)];
+                                     timestampInMilliseconds:timestampInMilliseconds];
 }
 
 @end
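A quick illustration of the new contract (not part of the patch; it assumes a detector and
an input image created elsewhere, and uses `detectInImage:error:`, the image-mode API
declared in MPPObjectDetector.h):

    // Illustrative caller. After this patch, an unvalidated detections packet
    // yields an empty result (detections == @[]) carrying a valid timestamp,
    // where it previously surfaced as nil.
    static void LogDetectionCount(MPPObjectDetector *detector, MPPImage *image) {
      NSError *error = nil;
      MPPObjectDetectorResult *result = [detector detectInImage:image error:&error];
      if (result && result.detections.count == 0) {
        NSLog(@"No detections at %ld ms", (long)result.timestampInMilliseconds);
      }
    }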
From 1e1693d9aaf348fdf1d2fa9d2836bd76056b1c54 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 24 May 2023 20:24:34 +0530
Subject: [PATCH 2/4] Added support to set delegates in MPPBaseOptions

---
 .../tasks/ios/core/sources/MPPBaseOptions.h  | 21 ++++++++++++++++++-
 .../tasks/ios/core/sources/MPPBaseOptions.m  |  1 +
 .../utils/sources/MPPBaseOptions+Helpers.mm  | 13 ++++++++++++
 3 files changed, 34 insertions(+), 1 deletion(-)

diff --git a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h
index bef6bb9ee..9f61d872a 100644
--- a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h
+++ b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h
@@ -16,6 +16,17 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
+/**
+ * MediaPipe Tasks delegate.
+ */
+typedef NS_ENUM(NSUInteger, MPPDelegate) {
+  /** CPU. */
+  MPPDelegateCPU,
+
+  /** GPU. */
+  MPPDelegateGPU
+} NS_SWIFT_NAME(Delegate);
+
 /**
  * Holds the base options that are used for creation of any type of task. It has fields with
  * important information such as acceleration configuration, TFLite model source etc.
@@ -23,9 +34,17 @@ NS_ASSUME_NONNULL_BEGIN
 NS_SWIFT_NAME(BaseOptions)
 @interface MPPBaseOptions : NSObject
 
-/** The path to the model asset to open and mmap in memory. */
+/**
+ * The absolute path to a model asset file (a TFLite model or a model asset bundle file) stored in the app bundle.
+ */
 @property(nonatomic, copy) NSString *modelAssetPath;
 
+/**
+ * Device delegate to run the MediaPipe pipeline. If the delegate is not set, the default
+ * delegate CPU is used.
+ */
+@property(nonatomic) MPPDelegate delegate;
+
 @end
 
 NS_ASSUME_NONNULL_END

diff --git a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m
index a43119ad8..c3571c4b4 100644
--- a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m
+++ b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m
@@ -28,6 +28,7 @@
   MPPBaseOptions *baseOptions = [[MPPBaseOptions alloc] init];
 
   baseOptions.modelAssetPath = self.modelAssetPath;
+  baseOptions.delegate = self.delegate;
 
   return baseOptions;
 }

diff --git a/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm b/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm
index 73bcac49d..a97487cd9 100644
--- a/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm
+++ b/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm
@@ -33,6 +33,19 @@ using BaseOptionsProto = ::mediapipe::tasks::core::proto::BaseOptions;
   if (self.modelAssetPath) {
     baseOptionsProto->mutable_model_asset()->set_file_name(self.modelAssetPath.UTF8String);
   }
+
+  switch (self.delegate) {
+    case MPPDelegateCPU: {
+      baseOptionsProto->mutable_acceleration()->mutable_tflite();
+      break;
+    }
+    case MPPDelegateGPU: {
+      baseOptionsProto->mutable_acceleration()->mutable_gpu();
+      break;
+    }
+    default:
+      break;
+  }
 }
 
 @end
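A sketch of the intended call site (illustrative only; it assumes a task options class such
as `MPPObjectDetectorOptions` that exposes this `baseOptions`, and a `modelPath` string
defined elsewhere; note that this whole API is reverted again in PATCH 4/4):

    // Illustrative usage of the new delegate field.
    MPPObjectDetectorOptions *options = [[MPPObjectDetectorOptions alloc] init];
    options.baseOptions.modelAssetPath = modelPath;
    // The helper above maps MPPDelegateGPU to acceleration.gpu in the
    // BaseOptionsProto, and MPPDelegateCPU to acceleration.tflite (the
    // default CPU inference path).
    options.baseOptions.delegate = MPPDelegateGPU;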
From 8f1a56f3c2ee36737a6511e7fc951eb65fd1d243 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 24 May 2023 20:24:41 +0530
Subject: [PATCH 3/4] Fixed typos

---
 .../object_detector/sources/MPPObjectDetector.h | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
index d3f946bbe..f8cfcc916 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
@@ -80,7 +80,7 @@ NS_SWIFT_NAME(ObjectDetector)
  * Creates a new instance of `MPPObjectDetector` from the given `MPPObjectDetectorOptions`.
  *
  * @param options The options of type `MPPObjectDetectorOptions` to use for configuring the
- * `MPPImageClassifMPPObjectDetectorier`.
+ * `MPPObjectDetector`.
  * @param error An optional error parameter populated when there is an error in initializing the
  * object detector.
  *
@@ -96,7 +96,7 @@ NS_SWIFT_NAME(ObjectDetector)
  * `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
  * `MPPRunningModeImage`.
  *
- * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of
  * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
  * must have one of the following pixel format types:
  * 1. kCVPixelFormatType_32BGRA
@@ -123,7 +123,7 @@ NS_SWIFT_NAME(ObjectDetector)
  * the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
  * `MPPRunningModeVideo`.
  *
- * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of
  * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
  * must have one of the following pixel format types:
  * 1. kCVPixelFormatType_32BGRA
@@ -161,7 +161,7 @@ NS_SWIFT_NAME(ObjectDetector)
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
  * to the object detector. The input timestamps must be monotonically increasing.
  *
- * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of
  * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
  * must have one of the following pixel format types:
  * 1. kCVPixelFormatType_32BGRA
@@ -170,8 +170,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
  * space is RGB with an Alpha channel.
  *
- * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
- * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
+ * If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure
+ * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
 * `videoSettings` property.
 *
 * @param image A live stream image data of type `MPPImage` on which object detection is to be
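For the live-stream guidance above, a minimal AVFoundation configuration sketch
(illustrative only, not part of the patch; kCVPixelFormatType_32BGRA is one of the pixel
formats the header lists as supported):

    #import <AVFoundation/AVFoundation.h>

    // Request BGRA frames from the camera via the videoSettings property, as
    // the live-stream doc comment above advises.
    static AVCaptureVideoDataOutput *MakeVideoDataOutput(void) {
      AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
      output.videoSettings =
          @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
      return output;
    }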
From 1aa44abcaba07ab2e5ba768d6f3e9882a3e694f9 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 24 May 2023 20:25:45 +0530
Subject: [PATCH 4/4] Revert "Added support to set delegates in MPPBaseOptions"

This reverts commit 1e1693d9aaf348fdf1d2fa9d2836bd76056b1c54.
---
 .../tasks/ios/core/sources/MPPBaseOptions.h  | 21 +-------------------
 .../tasks/ios/core/sources/MPPBaseOptions.m  |  1 -
 .../utils/sources/MPPBaseOptions+Helpers.mm  | 13 -------------
 3 files changed, 1 insertion(+), 34 deletions(-)

diff --git a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h
index 9f61d872a..bef6bb9ee 100644
--- a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h
+++ b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h
@@ -16,17 +16,6 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-/**
- * MediaPipe Tasks delegate.
- */
-typedef NS_ENUM(NSUInteger, MPPDelegate) {
-  /** CPU. */
-  MPPDelegateCPU,
-
-  /** GPU. */
-  MPPDelegateGPU
-} NS_SWIFT_NAME(Delegate);
-
 /**
  * Holds the base options that are used for creation of any type of task. It has fields with
  * important information such as acceleration configuration, TFLite model source etc.
@@ -34,17 +23,9 @@ NS_ASSUME_NONNULL_BEGIN
 NS_SWIFT_NAME(BaseOptions)
 @interface MPPBaseOptions : NSObject
 
-/**
- * The absolute path to a model asset file (a TFLite model or a model asset bundle file) stored in the app bundle.
- */
+/** The path to the model asset to open and mmap in memory. */
 @property(nonatomic, copy) NSString *modelAssetPath;
 
-/**
- * Device delegate to run the MediaPipe pipeline. If the delegate is not set, the default
- * delegate CPU is used.
- */
-@property(nonatomic) MPPDelegate delegate;
-
 @end
 
 NS_ASSUME_NONNULL_END

diff --git a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m
index c3571c4b4..a43119ad8 100644
--- a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m
+++ b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m
@@ -28,7 +28,6 @@
   MPPBaseOptions *baseOptions = [[MPPBaseOptions alloc] init];
 
   baseOptions.modelAssetPath = self.modelAssetPath;
-  baseOptions.delegate = self.delegate;
 
   return baseOptions;
 }

diff --git a/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm b/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm
index a97487cd9..73bcac49d 100644
--- a/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm
+++ b/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm
@@ -33,19 +33,6 @@ using BaseOptionsProto = ::mediapipe::tasks::core::proto::BaseOptions;
   if (self.modelAssetPath) {
     baseOptionsProto->mutable_model_asset()->set_file_name(self.modelAssetPath.UTF8String);
   }
-
-  switch (self.delegate) {
-    case MPPDelegateCPU: {
-      baseOptionsProto->mutable_acceleration()->mutable_tflite();
-      break;
-    }
-    case MPPDelegateGPU: {
-      baseOptionsProto->mutable_acceleration()->mutable_gpu();
-      break;
-    }
-    default:
-      break;
-  }
 }
 
 @end