From 1323a5271c70e05b76833949faea67279a8bcb93 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 16:39:43 +0530 Subject: [PATCH 01/17] Added method to create unique dispatch queue names in MPPVisionTaskRunner --- .../common/utils/sources/NSString+Helpers.h | 1 + .../common/utils/sources/NSString+Helpers.mm | 4 ++++ mediapipe/tasks/ios/vision/core/BUILD | 1 + .../vision/core/sources/MPPVisionTaskRunner.h | 14 +++++++++++ .../core/sources/MPPVisionTaskRunner.mm | 23 ++++++++++++++----- 5 files changed, 37 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h b/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h index 8433baaaf..1697a28e4 100644 --- a/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h +++ b/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h @@ -24,6 +24,7 @@ NS_ASSUME_NONNULL_BEGIN + (NSString *)stringWithCppString:(std::string)text; ++ (NSString *)uuidString; @end NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.mm b/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.mm index b7d486e80..dfc7749be 100644 --- a/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.mm +++ b/mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.mm @@ -24,4 +24,8 @@ return [NSString stringWithCString:text.c_str() encoding:[NSString defaultCStringEncoding]]; } ++ (NSString *)uuidString{ + return [[NSUUID UUID] UUIDString]; +} + @end diff --git a/mediapipe/tasks/ios/vision/core/BUILD b/mediapipe/tasks/ios/vision/core/BUILD index 4b72fc91d..328d9e892 100644 --- a/mediapipe/tasks/ios/vision/core/BUILD +++ b/mediapipe/tasks/ios/vision/core/BUILD @@ -58,6 +58,7 @@ objc_library( "//mediapipe/framework/formats:rect_cc_proto", "//mediapipe/tasks/ios/common:MPPCommon", "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", "//mediapipe/tasks/ios/core:MPPTaskRunner", 
"//third_party/apple_frameworks:UIKit", "@com_google_absl//absl/status:statusor", diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h index 92b5563ef..318b24051 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h @@ -141,6 +141,20 @@ NS_ASSUME_NONNULL_BEGIN (mediapipe::tasks::core::PacketsCallback)packetsCallback error:(NSError **)error NS_UNAVAILABLE; +/** + * This method returns a unique dispatch queue name by adding the given suffix and a `UUID` to the + * pre-defined queue name prefix for vision tasks. The vision tasks can use this method to get + * unique dispatch queue names which are consistent with other vision tasks. + * Dispatch queue names need not be unique, but for easy debugging we ensure that the queue names + * are unique. + * + * @param suffix A suffix that identifies a dispatch queue's functionality. + * + * @return A unique dispatch queue name by adding the given suffix and a `UUID` to the pre-defined + * queue name prefix for vision tasks. + */ ++ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix; + - (instancetype)init NS_UNAVAILABLE; + (instancetype)new NS_UNAVAILABLE; diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm index 40b68a211..0089e516f 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm @@ -16,6 +16,7 @@ #import "mediapipe/tasks/ios/common/sources/MPPCommon.h" #import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h" +#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" #include "absl/status/statusor.h" @@ -37,6 +38,8 @@ static const NSInteger kMPPOrientationDegreesDown = -180; /** Rotation degrees for a 90 degree rotation to the left. 
*/ static const NSInteger kMPPOrientationDegreesLeft = -270; +static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; + @interface MPPVisionTaskRunner () { MPPRunningMode _runningMode; } @@ -54,18 +57,21 @@ static const NSInteger kMPPOrientationDegreesLeft = -270; if (packetsCallback) { [MPPCommonUtils createCustomError:error withCode:MPPTasksErrorCodeInvalidArgumentError - description:@"The vision task is in image or video mode, a " - @"user-defined result callback should not be provided."]; + description:@"The vision task is in image or video mode. The " + @"delegate must not be set in the task's options."]; return nil; } break; } case MPPRunningModeLiveStream: { if (!packetsCallback) { - [MPPCommonUtils createCustomError:error - withCode:MPPTasksErrorCodeInvalidArgumentError - description:@"The vision task is in live stream mode, a user-defined " - @"result callback must be provided."]; + [MPPCommonUtils + createCustomError:error + withCode:MPPTasksErrorCodeInvalidArgumentError + description: + @"The vision task is in live stream mode. An object must be set as the " + @"delegate of the task in its options to ensure asynchronous delivery of " + @"results."]; return nil; } break; @@ -197,4 +203,9 @@ static const NSInteger kMPPOrientationDegreesLeft = -270; return [self sendPacketMap:packetMap error:error]; } ++ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix { + return [NSString stringWithFormat:@"%@.%@_%@", kTaskPrefix, suffix, [NSString uuidString]] + .UTF8String; +} + @end From ab4b07646c994b62b9b3f341defffe9786cc6191 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 16:43:18 +0530 Subject: [PATCH 02/17] Updated MPPImageClassifier to use delegates instead of completion blocks for callback. 
--- .../MPPImageClassifierTests.m | 126 ++++++++++++------ .../sources/MPPImageClassifier.h | 16 ++- .../sources/MPPImageClassifier.mm | 63 +++++++-- .../sources/MPPImageClassifierOptions.h | 56 +++++++- .../sources/MPPImageClassifierOptions.m | 2 +- 5 files changed, 205 insertions(+), 58 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index 58abb5c70..a2fd68482 100644 --- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -27,6 +27,8 @@ static NSDictionary *const kMultiObjectsRotatedImage = @{@"name" : @"multi_objects_rotated", @"type" : @"jpg"}; static const int kMobileNetCategoriesCount = 1001; static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; +static NSString *const kLiveStreamTestsDictImageClassifierKey = @"image_classifier"; +static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; #define AssertEqualErrors(error, expectedError) \ XCTAssertNotNil(error); \ @@ -54,11 +56,14 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; XCTAssertEqual(imageClassifierResult.classificationResult.classifications.count, 1); \ XCTAssertEqual(imageClassifierResult.classificationResult.classifications[0].headIndex, 0); -@interface MPPImageClassifierTests : XCTestCase +@interface MPPImageClassifierTests : XCTestCase { + NSDictionary *liveStreamSucceedsTestDict; + NSDictionary *outOfOrderTimestampTestDict; +} + @end @implementation MPPImageClassifierTests - #pragma mark Results + (NSArray *)expectedResultCategoriesForClassifyBurgerImageWithFloatModel { @@ -436,43 +441,43 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; #pragma mark Running Mode Tests -- (void)testCreateImageClassifierFailsWithResultListenerInNonLiveStreamMode { +- 
(void)testCreateImageClassifierFailsWithDelegateInNonLiveStreamMode { MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName]; options.runningMode = runningModesToTest[i]; - options.completion = ^(MPPImageClassifierResult *result, NSError *error) { - }; + options.imageClassifierLiveStreamDelegate = self; [self assertCreateImageClassifierWithOptions:options failsWithExpectedError: - [NSError - errorWithDomain:kExpectedErrorDomain - code:MPPTasksErrorCodeInvalidArgumentError - userInfo:@{ - NSLocalizedDescriptionKey : - @"The vision task is in image or video mode, a " - @"user-defined result callback should not be provided." - }]]; + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in image or video mode. The " + @"delegate must not be set in the task's options." + }]]; } } -- (void)testCreateImageClassifierFailsWithMissingResultListenerInLiveStreamMode { +- (void)testCreateImageClassifierFailsWithMissingDelegateInLiveStreamMode { MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName]; options.runningMode = MPPRunningModeLiveStream; [self assertCreateImageClassifierWithOptions:options failsWithExpectedError: - [NSError errorWithDomain:kExpectedErrorDomain - code:MPPTasksErrorCodeInvalidArgumentError - userInfo:@{ - NSLocalizedDescriptionKey : - @"The vision task is in live stream mode, a " - @"user-defined result callback must be provided." - }]]; + [NSError + errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in live stream mode. 
An object " + @"must be set as the delegate of the task in its " + @"options to ensure asynchronous delivery of results." + }]]; } - (void)testClassifyFailsWithCallingWrongApiInImageMode { @@ -553,9 +558,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName]; options.runningMode = MPPRunningModeLiveStream; - options.completion = ^(MPPImageClassifierResult *result, NSError *error) { - - }; + options.imageClassifierLiveStreamDelegate = self; MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options]; @@ -619,16 +622,20 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; options.maxResults = maxResults; options.runningMode = MPPRunningModeLiveStream; - options.completion = ^(MPPImageClassifierResult *result, NSError *error) { - [self assertImageClassifierResult:result - hasExpectedCategoriesCount:maxResults - expectedCategories: - [MPPImageClassifierTests - expectedResultCategoriesForClassifyBurgerImageWithFloatModel]]; - }; + options.imageClassifierLiveStreamDelegate = self; + + XCTestExpectation *expectation = [[XCTestExpectation alloc] + initWithDescription:@"classifyWithOutOfOrderTimestampsAndLiveStream"]; + + expectation.expectedFulfillmentCount = 1; MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options]; + outOfOrderTimestampTestDict = @{ + kLiveStreamTestsDictImageClassifierKey : imageClassifier, + kLiveStreamTestsDictExpectationKey : expectation + }; + MPPImage *image = [self imageWithFileInfo:kBurgerImage]; XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:1 error:nil]); @@ -644,6 +651,8 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing." 
}]; AssertEqualErrors(error, expectedError); + + [self waitForExpectations:@[ expectation ] timeout:1e-2f]; } - (void)testClassifyWithLiveStreamModeSucceeds { @@ -653,24 +662,63 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; options.maxResults = maxResults; options.runningMode = MPPRunningModeLiveStream; - options.completion = ^(MPPImageClassifierResult *result, NSError *error) { - [self assertImageClassifierResult:result - hasExpectedCategoriesCount:maxResults - expectedCategories: - [MPPImageClassifierTests - expectedResultCategoriesForClassifyBurgerImageWithFloatModel]]; - }; + options.imageClassifierLiveStreamDelegate = self; + + NSInteger iterationCount = 100; + + // Because of flow limiting, we cannot ensure that the callback will be + // invoked `iterationCount` times. + // An normal expectation will fail if expectation.fullfill() is not called + // `expectation.expectedFulfillmentCount` times. + // If `expectation.isInverted = true`, the test will only succeed if + // expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // Since in our case we cannot predict how many times the expectation is + // supposed to be fullfilled setting, + // `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if + // expectation is fullfilled <= `iterationCount` times. + XCTestExpectation *expectation = + [[XCTestExpectation alloc] initWithDescription:@"classifyWithLiveStream"]; + + expectation.expectedFulfillmentCount = iterationCount + 1; + expectation.inverted = YES; MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options]; + liveStreamSucceedsTestDict = @{ + kLiveStreamTestsDictImageClassifierKey : imageClassifier, + kLiveStreamTestsDictExpectationKey : expectation + }; + // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used // with the iOS camera. 
AVCaptureVideoDataOutput sample buffer delegates provide frames of type // `CMSampleBuffer`. MPPImage *image = [self imageWithFileInfo:kBurgerImage]; - for (int i = 0; i < 3; i++) { + for (int i = 0; i < iterationCount; i++) { XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]); } + + [self waitForExpectations:@[ expectation ] timeout:1e-2f]; +} + +- (void)imageClassifier:(MPPImageClassifier *)imageClassifier + didFinishClassificationWithResult:(MPPImageClassifierResult *)imageClassifierResult + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError *)error { + NSInteger maxResults = 3; + [self assertImageClassifierResult:imageClassifierResult + hasExpectedCategoriesCount:maxResults + expectedCategories: + [MPPImageClassifierTests + expectedResultCategoriesForClassifyBurgerImageWithFloatModel]]; + + if (imageClassifier == outOfOrderTimestampTestDict[kLiveStreamTestsDictImageClassifierKey]) { + [outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } else if (imageClassifier == + liveStreamSucceedsTestDict[kLiveStreamTestsDictImageClassifierKey]) { + [liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } } @end diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h index 345687877..024eee0aa 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h @@ -164,8 +164,11 @@ NS_SWIFT_NAME(ImageClassifier) * Sends live stream image data of type `MPPImage` to perform image classification using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeLiveStream`. 
Results are provided asynchronously via the `completion` callback - * provided in the `MPPImageClassifierOptions`. + * `MPPRunningModeLiveStream`. + * The object which needs to be continuously notified of the available results of image + * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the + * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` + * delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. @@ -185,11 +188,14 @@ NS_SWIFT_NAME(ImageClassifier) NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:)); /** - * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the + * Sends live stream image data of type ``MPPImage`` to perform image classification, cropped to the * specified region of interest.. Rotation will be applied according to the `orientation` property * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback - * provided in the `MPPImageClassifierOptions`. + * `MPPRunningModeLiveStream`. + * The object which needs to be continuously notified of the available results of image + * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the + * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate + * method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. 
diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm index 3e345a5d0..408153c01 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm @@ -27,6 +27,7 @@ namespace { using ::mediapipe::NormalizedRect; using ::mediapipe::Packet; +using ::mediapipe::Timestamp; using ::mediapipe::tasks::core::PacketMap; using ::mediapipe::tasks::core::PacketsCallback; } // namespace @@ -38,9 +39,9 @@ static NSString *const kImageOutStreamName = @"image_out"; static NSString *const kImageTag = @"IMAGE"; static NSString *const kNormRectStreamName = @"norm_rect_in"; static NSString *const kNormRectTag = @"NORM_RECT"; - static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.image_classifier.ImageClassifierGraph"; +static NSString *const kTaskName = @"imageClassifier"; #define InputPacketMap(imagePacket, normalizedRectPacket) \ { \ @@ -53,6 +54,8 @@ static NSString *const kTaskGraphName = /** iOS Vision Task Runner */ MPPVisionTaskRunner *_visionTaskRunner; } +@property(nonatomic, weak) id + imageClassifierLiveStreamDelegate; @end @implementation MPPImageClassifier @@ -81,16 +84,58 @@ static NSString *const kTaskGraphName = PacketsCallback packetsCallback = nullptr; - if (options.completion) { + if (options.imageClassifierLiveStreamDelegate) { + _imageClassifierLiveStreamDelegate = options.imageClassifierLiveStreamDelegate; + // Capturing `self` as weak in order to avoid `self` being kept in memory + // and cause a retain cycle, after self is set to `nil`. + MPPImageClassifier *__weak weakSelf = self; + + // Create a private serial dispatch queue in which the deleagte method will be called + // asynchronously. 
This is to ensure that if the client performs a long running operation in + // the delegate method, the queue on which the C++ callbacks is invoked is not blocked and is + // freed up to continue with its operations. + const char *queueName = [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName]; + dispatch_queue_t callbackQueue = dispatch_queue_create(queueName, NULL); packetsCallback = [=](absl::StatusOr status_or_packets) { - NSError *callbackError = nil; - MPPImageClassifierResult *result; - if ([MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) { - result = [MPPImageClassifierResult - imageClassifierResultWithClassificationsPacket: - status_or_packets.value()[kClassificationsStreamName.cppString]]; + if (!weakSelf) { + return; } - options.completion(result, callbackError); + if (![weakSelf.imageClassifierLiveStreamDelegate + respondsToSelector:@selector + (imageClassifier: + didFinishClassificationWithResult:timestampInMilliseconds:error:)]) { + return; + } + + NSError *callbackError = nil; + if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) { + dispatch_async(callbackQueue, ^{ + [weakSelf.imageClassifierLiveStreamDelegate imageClassifier:weakSelf + didFinishClassificationWithResult:nil + timestampInMilliseconds:Timestamp::Unset().Value() + error:callbackError]; + }); + return; + } + + PacketMap &outputPacketMap = status_or_packets.value(); + if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) { + return; + } + + MPPImageClassifierResult *result = + [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket: + outputPacketMap[kClassificationsStreamName.cppString]]; + + NSInteger timeStampInMilliseconds = + outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / + kMicroSecondsPerMilliSecond; + dispatch_async(callbackQueue, ^{ + [weakSelf.imageClassifierLiveStreamDelegate imageClassifier:weakSelf + didFinishClassificationWithResult:result + 
timestampInMilliseconds:timeStampInMilliseconds + error:callbackError]; + }); }; } diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h index 2e6022041..fc76560c2 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h @@ -20,20 +20,68 @@ NS_ASSUME_NONNULL_BEGIN +@class MPPImageClassifier; + +/** + * This protocol defines an interface for the delegates of `MPPImageClassifier` object to receive + * results of asynchronous classification of images + * (i.e, when `runningMode = MPPRunningModeLiveStream`). + * + * The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierLiveStreamDelegate` protocol. + * The methods in this protocol are optional. + */ +NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) +@protocol MPPImageClassifierLiveStreamDelegate + +@optional +/** + * This method notifies a delegate that the results of asynchronous classification of + * an image submitted to the `MPPImageClassifier` is available. + * + * This method is called on a private serial queue created by the `MPPImageClassifier` + * for performing the asynchronous delegates calls. + * + * @param imageClassifier The image classifier which performed the classification. + * This is useful to test equality when there are multiple instances of `MPPImageClassifier`. + * @param result An `MPPImageClassifierResult` object that contains a list of image classifications. + * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input + * image was sent to the image classifier. + * @param error An optional error parameter populated when there is an error in performing image + * classification on the input live stream image data. 
+ * + */ +- (void)imageClassifier:(MPPImageClassifier *)imageClassifier + didFinishClassificationWithResult:(nullable MPPImageClassifierResult *)result + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(nullable NSError *)error + NS_SWIFT_NAME(imageClassifier(_:didFinishClassification:timestampInMilliseconds:error:)); +@end + /** * Options for setting up a `MPPImageClassifier`. */ NS_SWIFT_NAME(ImageClassifierOptions) @interface MPPImageClassifierOptions : MPPTaskOptions +/** + * Running mode of the image classifier task. Defaults to `MPPRunningModeImage`. + * `MPPImageClassifier` can be created with one of the following running modes: + * 1. `MPPRunningModeImage`: The mode for performing classification on single image inputs. + * 2. `MPPRunningModeVideo`: The mode for performing classification on the decoded frames of a + * video. + * 3. `MPPRunningModeLiveStream`: The mode for performing classification on a live stream of input + * data, such as from the camera. + */ @property(nonatomic) MPPRunningMode runningMode; /** - * The user-defined result callback for processing live stream data. The result callback should only - * be specified when the running mode is set to the live stream mode. - * TODO: Add parameter `MPPImage` in the callback. + * An object that confirms to `MPPImageClassifierLiveStreamDelegate` protocol. This object must + * implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive + * the results of asynchronous classification on images (i.e, when `runningMode = + * MPPRunningModeLiveStream`). */ -@property(nonatomic, copy) void (^completion)(MPPImageClassifierResult *result, NSError *error); +@property(nonatomic, weak, nullable) id + imageClassifierLiveStreamDelegate; /** * The locale to use for display names specified through the TFLite Model Metadata, if any. 
Defaults diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m index e109dcc3b..8d3815ff3 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m @@ -33,7 +33,7 @@ imageClassifierOptions.categoryDenylist = self.categoryDenylist; imageClassifierOptions.categoryAllowlist = self.categoryAllowlist; imageClassifierOptions.displayNamesLocale = self.displayNamesLocale; - imageClassifierOptions.completion = self.completion; + imageClassifierOptions.imageClassifierLiveStreamDelegate = self.imageClassifierLiveStreamDelegate; return imageClassifierOptions; } From e47bb165442a61a93af2d83d0c081bc00c6b215a Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 16:52:58 +0530 Subject: [PATCH 03/17] Added validation of C++ image classification result packet in MPPImageClassifierResult+Helpers.mm --- .../sources/MPPImageClassifierResult+Helpers.h | 4 +++- .../sources/MPPImageClassifierResult+Helpers.mm | 13 +++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.h b/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.h index 0375ac2a5..68d939f45 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.h @@ -18,6 +18,8 @@ NS_ASSUME_NONNULL_BEGIN +static const int kMicroSecondsPerMilliSecond = 1000; + @interface MPPImageClassifierResult (Helpers) /** @@ -28,7 +30,7 @@ NS_ASSUME_NONNULL_BEGIN * * @return An `MPPImageClassifierResult` object that contains a list of image classifications. 
*/ -+ (MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket: ++ (nullable MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket: (const mediapipe::Packet &)packet; @end diff --git a/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm b/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm index f5199765d..f43ad0d7a 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm +++ b/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm @@ -17,8 +17,6 @@ #include "mediapipe/tasks/cc/components/containers/proto/classifications.pb.h" -static const int kMicroSecondsPerMilliSecond = 1000; - namespace { using ClassificationResultProto = ::mediapipe::tasks::components::containers::proto::ClassificationResult; @@ -27,9 +25,16 @@ using ::mediapipe::Packet; @implementation MPPImageClassifierResult (Helpers) -+ (MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket: ++ (nullable MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket: (const Packet &)packet { - MPPClassificationResult *classificationResult = [MPPClassificationResult + MPPClassificationResult *classificationResult; + MPPImageClassifierResult *imageClassifierResult; + + if (!packet.ValidateAsType().ok()) { + return nil; + } + + classificationResult = [MPPClassificationResult classificationResultWithProto:packet.Get()]; return [[MPPImageClassifierResult alloc] From 1136d4d5156b02749819c691c740fe0010488d3e Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 18:58:49 +0530 Subject: [PATCH 04/17] Updated CVPixelBuffer to support pixel format type of 32RGBA --- mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm | 1 + 1 file changed, 1 insertion(+) diff --git a/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm 
b/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm index 75dfcc650..5258b540f 100644 --- a/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm +++ b/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm @@ -162,6 +162,7 @@ using ::mediapipe::ImageFrame; OSType pixelBufferFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); switch (pixelBufferFormat) { + case kCVPixelFormatType_32RGBA: case kCVPixelFormatType_32BGRA: { return [MPPCVPixelBufferUtils rgbImageFrameFromCVPixelBuffer:pixelBuffer error:error]; } From d401439daa2f1791265b34d3a81d6466c7f9c125 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 19:22:11 +0530 Subject: [PATCH 05/17] Updated formatting --- .../tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm b/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm index 5258b540f..667279b9f 100644 --- a/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm +++ b/mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.mm @@ -170,7 +170,8 @@ using ::mediapipe::ImageFrame; [MPPCommonUtils createCustomError:error withCode:MPPTasksErrorCodeInvalidArgumentError description:@"Unsupported pixel format for CVPixelBuffer. 
Supported " - @"pixel format types are kCVPixelFormatType_32BGRA"]; + @"pixel format types are kCVPixelFormatType_32BGRA and " + @"kCVPixelFormatType_32RGBA"]; } } From ddd1515f88f5f7a9ab316eaff19f59f2a1ce9949 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 19:40:15 +0530 Subject: [PATCH 06/17] Updated documentation --- .../sources/MPPImageClassifier.h | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h index 024eee0aa..549fa9fa4 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h @@ -85,6 +85,14 @@ NS_SWIFT_NAME(ImageClassifier) * interest. Rotation will be applied according to the `orientation` property of the provided * `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeImage`. + * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. * * @param image The `MPPImage` on which image classification is to be performed. * @param error An optional error parameter populated when there is an error in performing image @@ -102,6 +110,15 @@ NS_SWIFT_NAME(ImageClassifier) * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeImage`. * + * This method supports classification of RGBA images. 
If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * * @param image The `MPPImage` on which image classification is to be performed. * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which * image classification should be performed. @@ -121,6 +138,18 @@ NS_SWIFT_NAME(ImageClassifier) * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeVideo`. * + * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must + * be monotonically increasing. + * + * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * * @param image The `MPPImage` on which image classification is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. @@ -143,6 +172,15 @@ NS_SWIFT_NAME(ImageClassifier) * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * be monotonically increasing. * + * This method supports classification of RGBA images. 
If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * * @param image A live stream image data of type `MPPImage` on which image classification is to be * performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input @@ -165,6 +203,7 @@ NS_SWIFT_NAME(ImageClassifier) * image as region of interest. Rotation will be applied according to the `orientation` property of * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeLiveStream`. + * * The object which needs to be continuously notified of the available results of image * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` @@ -173,6 +212,19 @@ NS_SWIFT_NAME(ImageClassifier) * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. * + * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color + * space is RGB with an Alpha channel. 
+ * + * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you + * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32BGRA` using its + * `videoSettings` property. + * + * @param image A live stream image data of type `MPPImage` on which image classification is to be * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input @@ -192,6 +244,7 @@ NS_SWIFT_NAME(ImageClassifier) * specified region of interest.. Rotation will be applied according to the `orientation` property * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeLiveStream`. + * * The object which needs to be continuously notified of the available results of image * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate * @@ -199,6 +252,19 @@ NS_SWIFT_NAME(ImageClassifier) * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. + * + * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color + * space is RGB with an Alpha channel. + * + * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you + * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32BGRA` using its + * `videoSettings` property. 
* * @param image A live stream image data of type `MPPImage` on which image classification is to be * performed. From 33ae23c53aacd144377c7cf37850021ed600a46a Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 19:51:54 +0530 Subject: [PATCH 07/17] Increased wait time for image classifier asynchronous tests --- .../test/vision/image_classifier/MPPImageClassifierTests.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index a2fd68482..d873d409a 100644 --- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -652,7 +652,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; }]; AssertEqualErrors(error, expectedError); - [self waitForExpectations:@[ expectation ] timeout:1e-2f]; + [self waitForExpectations:@[ expectation ] timeout:0.5f]; } - (void)testClassifyWithLiveStreamModeSucceeds { @@ -699,7 +699,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]); } - [self waitForExpectations:@[ expectation ] timeout:1e-2f]; + [self waitForExpectations:@[ expectation ] timeout:0.5f]; } - (void)imageClassifier:(MPPImageClassifier *)imageClassifier From 08282d9fd78057bd0727848fd7b729b8ef9362c0 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 19:59:39 +0530 Subject: [PATCH 08/17] Updated time out for image classifier async tests --- .../test/vision/image_classifier/MPPImageClassifierTests.m | 6 ++++-- .../test/vision/object_detector/MPPObjectDetectorTests.m | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m 
b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index d873d409a..7eb93df8e 100644 --- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -652,7 +652,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; }]; AssertEqualErrors(error, expectedError); - [self waitForExpectations:@[ expectation ] timeout:0.5f]; + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; } - (void)testClassifyWithLiveStreamModeSucceeds { @@ -699,7 +700,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]); } - [self waitForExpectations:@[ expectation ] timeout:0.5f]; + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; } - (void)imageClassifier:(MPPImageClassifier *)imageClassifier diff --git a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m index fd9466b7d..d34078744 100644 --- a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m +++ b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m @@ -715,7 +715,7 @@ static const float scoreDifferenceTolerance = 0.02f; XCTAssertTrue([objectDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]); } - [self waitForExpectations:@[ expectation ] timeout:0.5]; + [self waitForExpectations:@[ expectation ] timeout:0.5f]; } @end From 3df4f7db6458b64b5055c5300448872e0a4f5648 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 20:00:29 +0530 Subject: [PATCH 09/17] Updated time out for object detector --- .../test/vision/object_detector/MPPObjectDetectorTests.m | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) 
diff --git a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m index d34078744..d3b81703b 100644 --- a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m +++ b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m @@ -664,7 +664,9 @@ static const float scoreDifferenceTolerance = 0.02f; @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing." }]; AssertEqualErrors(error, expectedError); - [self waitForExpectations:@[ expectation ] timeout:1.0]; + + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; } - (void)testDetectWithLiveStreamModeSucceeds { @@ -715,7 +717,8 @@ static const float scoreDifferenceTolerance = 0.02f; XCTAssertTrue([objectDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]); } - [self waitForExpectations:@[ expectation ] timeout:0.5f]; + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; } @end From 47013d289eda63967457bd134dc479837efb6a56 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 23:19:12 +0530 Subject: [PATCH 10/17] Added flow limiter calculator in MediaPipeTasksCommon --- mediapipe/tasks/ios/BUILD | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index c839acd84..e05f03d61 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -42,6 +42,7 @@ OBJC_COMMON_DEPS = [ "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", "//mediapipe/tasks/ios/components/containers:MPPCategory", "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", + "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", "//mediapipe/tasks/cc/vision/object_detector:object_detector_graph", 
"//mediapipe/tasks/cc/text/text_classifier:text_classifier_graph", @@ -172,6 +173,7 @@ apple_static_library( minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, platform_type = "ios", deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/text/text_classifier:text_classifier_graph", "//mediapipe/tasks/cc/text/text_embedder:text_embedder_graph", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", From 330976ce9ec18399bb2b483840845ef51313f403 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 4 May 2023 23:51:41 +0530 Subject: [PATCH 11/17] Added utils of containers and core to MPPTaskCommon to avoid warnings in xcode --- mediapipe/tasks/ios/BUILD | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index e05f03d61..6d18baf3d 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -34,13 +34,13 @@ licenses(["notice"]) # 2. Task graphs. These will be built with ":MediaPipeTaskGraphs_library". # 3. gpu targets which will be built with the ":MediaPipeTaskGraphs_library". 
OBJC_COMMON_DEPS = [ - "//mediapipe/tasks/ios/core:MPPBaseOptions", + "//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers", "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/core:MPPTaskResult", "//mediapipe/tasks/ios/core:MPPTaskRunner", - "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", - "//mediapipe/tasks/ios/components/containers:MPPCategory", + "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers", + "//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers", "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", @@ -204,9 +204,9 @@ apple_static_xcframework( }, deps = [ "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", - "//mediapipe/tasks/ios/components/containers:MPPCategory", - "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", - "//mediapipe/tasks/ios/core:MPPBaseOptions", + "//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers", + "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers", + "//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers", "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/core:MPPTaskResult", From 26810b6b84f35d30f90b90065b31668ab770ec8b Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 8 May 2023 16:30:40 +0530 Subject: [PATCH 12/17] Reverted back to using containers and options in BUILD --- mediapipe/tasks/ios/BUILD | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 6d18baf3d..65b6507d2 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -34,13 +34,13 @@ licenses(["notice"]) # 2. Task graphs. 
These will be built with ":MediaPipeTaskGraphs_library". # 3. gpu targets which will be built with the ":MediaPipeTaskGraphs_library". OBJC_COMMON_DEPS = [ - "//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers", + "//mediapipe/tasks/ios/core/utils:MPPBaseOptions", "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/core:MPPTaskResult", "//mediapipe/tasks/ios/core:MPPTaskRunner", - "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers", - "//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers", + "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResult", + "//mediapipe/tasks/ios/components/containers/utils:MPPCategory", "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", @@ -204,9 +204,9 @@ apple_static_xcframework( }, deps = [ "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", - "//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers", - "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers", - "//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers", + "//mediapipe/tasks/ios/components/containers/utils:MPPCategory", + "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResult", + "//mediapipe/tasks/ios/core/utils:MPPBaseOptions", "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/core:MPPTaskResult", From 18656434869dee030cd2ea6e1368fa469f14f9d4 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 8 May 2023 16:32:04 +0530 Subject: [PATCH 13/17] Fixed deps in ios task BUILD file --- mediapipe/tasks/ios/BUILD | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 65b6507d2..e05f03d61 100644 --- 
a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -34,13 +34,13 @@ licenses(["notice"]) # 2. Task graphs. These will be built with ":MediaPipeTaskGraphs_library". # 3. gpu targets which will be built with the ":MediaPipeTaskGraphs_library". OBJC_COMMON_DEPS = [ - "//mediapipe/tasks/ios/core/utils:MPPBaseOptions", + "//mediapipe/tasks/ios/core:MPPBaseOptions", "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/core:MPPTaskResult", "//mediapipe/tasks/ios/core:MPPTaskRunner", - "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResult", - "//mediapipe/tasks/ios/components/containers/utils:MPPCategory", + "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", + "//mediapipe/tasks/ios/components/containers:MPPCategory", "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", @@ -204,9 +204,9 @@ apple_static_xcframework( }, deps = [ "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", - "//mediapipe/tasks/ios/components/containers/utils:MPPCategory", - "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResult", - "//mediapipe/tasks/ios/core/utils:MPPBaseOptions", + "//mediapipe/tasks/ios/components/containers:MPPCategory", + "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", + "//mediapipe/tasks/ios/core:MPPBaseOptions", "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/core:MPPTaskResult", From 946042aca119c9a0ab321d5e51cf869111f46836 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 8 May 2023 16:33:09 +0530 Subject: [PATCH 14/17] Reverted addition of flow limiter calculator in image classifier iOS --- mediapipe/tasks/ios/BUILD | 2 -- 1 file changed, 2 deletions(-) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 
e05f03d61..c839acd84 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -42,7 +42,6 @@ OBJC_COMMON_DEPS = [ "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", "//mediapipe/tasks/ios/components/containers:MPPCategory", "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", - "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", "//mediapipe/tasks/cc/vision/object_detector:object_detector_graph", "//mediapipe/tasks/cc/text/text_classifier:text_classifier_graph", @@ -173,7 +172,6 @@ apple_static_library( minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, platform_type = "ios", deps = [ - "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/tasks/cc/text/text_classifier:text_classifier_graph", "//mediapipe/tasks/cc/text/text_embedder:text_embedder_graph", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", From 443418f6d5891039630f2ca32c0e3af0f521d1aa Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 8 May 2023 16:45:16 +0530 Subject: [PATCH 15/17] Updated formatting --- .../vision/object_detector/sources/MPPObjectDetectorOptions.h | 1 - 1 file changed, 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h index c91e170c9..bf2e02326 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h @@ -52,7 +52,6 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate) * image was sent to the object detector. * @param error An optional error parameter populated when there is an error in performing object * detection on the input live stream image data. 
- * */ - (void)objectDetector:(MPPObjectDetector *)objectDetector didFinishDetectionWithResult:(nullable MPPObjectDetectionResult *)result From db732e2913c2f771aa8409475a6187488bd0e13d Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 8 May 2023 16:57:17 +0530 Subject: [PATCH 16/17] Updated formatting in MPPImageClassifierOptions --- .../image_classifier/sources/MPPImageClassifierOptions.h | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h index fc76560c2..058c21aed 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h @@ -24,8 +24,8 @@ NS_ASSUME_NONNULL_BEGIN /** * This protocol defines an interface for the delegates of `MPPImageClassifier` object to receive - * results of asynchronous classification of images - * (i.e, when `runningMode = MPPRunningModeLiveStream`). + * results of asynchronous classification of images (i.e, when `runningMode = + * MPPRunningModeLiveStream`). * * The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierLiveStreamDelegate` protocol. * The methods in this protocol are optional. @@ -48,7 +48,6 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) * image was sent to the image classifier. * @param error An optional error parameter populated when there is an error in performing image * classification on the input live stream image data. 
- * */ - (void)imageClassifier:(MPPImageClassifier *)imageClassifier didFinishClassificationWithResult:(nullable MPPImageClassifierResult *)result From 4a192a6d8770b577e4d0750d4e35b2a46d9bf272 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 8 May 2023 16:58:00 +0530 Subject: [PATCH 17/17] Updated formatting in MPPImageClassifier --- .../ios/vision/image_classifier/sources/MPPImageClassifier.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h index 549fa9fa4..398236bbd 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h @@ -244,7 +244,7 @@ NS_SWIFT_NAME(ImageClassifier) * specified region of interest.. Rotation will be applied according to the `orientation` property * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeLiveStream`. - * + * * The object which needs to be continuously notified of the available results of image * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate @@ -252,7 +252,7 @@ NS_SWIFT_NAME(ImageClassifier) * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. - * + * * This method supports classification of RGBA images. If your `MPPImage` has a source type of * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * must have one of the following pixel format types: