diff --git a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m
index b106db024..9ccfece91 100644
--- a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m
+++ b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m
@@ -60,7 +60,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   XCTAssertEqualWithAccuracy(boundingBox.size.height, expectedBoundingBox.size.height, \
                              pixelDifferenceTolerance, @"index i = %d", idx);

-@interface MPPObjectDetectorTests : XCTestCase <MPPObjectDetectorDelegate> {
+@interface MPPObjectDetectorTests : XCTestCase <MPPObjectDetectorLiveStreamDelegate> {
   NSDictionary *liveStreamSucceedsTestDict;
   NSDictionary *outOfOrderTimestampTestDict;
 }
@@ -457,7 +457,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
     MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
     options.runningMode = runningModesToTest[i];
-    options.objectDetectorDelegate = self;
+    options.objectDetectorLiveStreamDelegate = self;

     [self assertCreateObjectDetectorWithOptions:options
@@ -568,7 +568,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
   options.runningMode = MPPRunningModeLiveStream;
-  options.objectDetectorDelegate = self;
+  options.objectDetectorLiveStreamDelegate = self;

   MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
@@ -633,7 +633,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   options.maxResults = maxResults;
   options.runningMode = MPPRunningModeLiveStream;
-  options.objectDetectorDelegate = self;
+  options.objectDetectorLiveStreamDelegate = self;

   XCTestExpectation *expectation = [[XCTestExpectation alloc]
       initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"];
@@ -689,7 +689,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   expectation.expectedFulfillmentCount = iterationCount + 1;
   expectation.inverted = YES;
-  options.objectDetectorDelegate = self;
+  options.objectDetectorLiveStreamDelegate = self;

   MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
@@ -710,7 +710,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   [self waitForExpectations:@[ expectation ] timeout:0.5];
 }

-#pragma mark MPPObjectDetectorDelegate Methods
+#pragma mark MPPObjectDetectorLiveStreamDelegate Methods

 - (void)objectDetector:(MPPObjectDetector *)objectDetector
     didFinishDetectionWithResult:(MPPObjectDetectionResult *)objectDetectionResult
          timestampInMilliseconds:(NSInteger)timestampInMilliseconds
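Note on the test changes above: the test class now conforms to the renamed `MPPObjectDetectorLiveStreamDelegate` protocol and implements its single delegate method. For readers following along, here is a minimal sketch of a conforming class outside the test target. The class name `ObjectDetectionListener` is hypothetical; the protocol and the delegate method signature come from the patch, and the `detections` property on the result is an assumption based on the result type's name.

// Hypothetical listener class; only the protocol and the delegate method
// signature are taken from the patch above.
@interface ObjectDetectionListener : NSObject <MPPObjectDetectorLiveStreamDelegate>
@end

@implementation ObjectDetectionListener

- (void)objectDetector:(MPPObjectDetector *)objectDetector
    didFinishDetectionWithResult:(MPPObjectDetectionResult *)objectDetectionResult
         timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                           error:(NSError *)error {
  if (error) {
    NSLog(@"Detection at %ld ms failed: %@", (long)timestampInMilliseconds, error);
    return;
  }
  // Assumes the result exposes its detections as an array property.
  NSLog(@"Detected %lu objects at %ld ms",
        (unsigned long)objectDetectionResult.detections.count,
        (long)timestampInMilliseconds);
}

@end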
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
index 44b7064fb..249ee0434 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
@@ -138,7 +138,7 @@ NS_SWIFT_NAME(ObjectDetector)
 * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
 * provided in the `MPPObjectDetectorOptions`.
 * The object which needs to be continuously notified of the available results of object
-* detection must confirm to `MPPObjectDetectorDelegate` protocol and implement the
+* detection must conform to the `MPPObjectDetectorLiveStreamDelegate` protocol and implement the
 * `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
 *
 * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
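The header comment above requires a monotonically increasing timestamp per frame in live stream mode. A hedged sketch of the call site follows, assuming the `detectAsyncInImage:timestampInMilliseconds:error:` entry point declared elsewhere in this header and an `MPPImage` already built from the current camera frame; results arrive on the delegate, not at the call site.

// Sketch only: `objectDetector` and `image` are assumed to exist.
// CACurrentMediaTime() requires QuartzCore and is one way to get a
// monotonically increasing clock.
NSError *frameError = nil;
NSInteger timestampInMilliseconds = (NSInteger)(CACurrentMediaTime() * 1000);
if (![objectDetector detectAsyncInImage:image
                timestampInMilliseconds:timestampInMilliseconds
                                  error:&frameError]) {
  NSLog(@"Failed to queue frame for detection: %@", frameError);
}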
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
index c384dd4b7..5dfbfdab8 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
@@ -51,14 +51,13 @@ static NSString *const kTaskName = @"objectDetector";
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
 }
-@property(nonatomic, weak) id<MPPObjectDetectorDelegate> objectDetectorDelegate;
+@property(nonatomic, weak) id<MPPObjectDetectorLiveStreamDelegate> objectDetectorLiveStreamDelegate;
 @end

 @implementation MPPObjectDetector

 - (instancetype)initWithOptions:(MPPObjectDetectorOptions *)options error:(NSError **)error {
   self = [super init];
-  NSLog(@"Object Detector Initializing with dispatch queu and weak self");
   if (self) {
     MPPTaskInfo *taskInfo = [[MPPTaskInfo alloc] initWithTaskGraphName:kTaskGraphName
@@ -80,19 +79,19 @@ static NSString *const kTaskName = @"objectDetector";
   PacketsCallback packetsCallback = nullptr;

-  if (options.objectDetectorDelegate) {
-    _objectDetectorDelegate = options.objectDetectorDelegate;
+  if (options.objectDetectorLiveStreamDelegate) {
+    _objectDetectorLiveStreamDelegate = options.objectDetectorLiveStreamDelegate;

     // Capturing `self` as weak in order to avoid `self` being kept in memory
     // and cause a retain cycle, after self is set to `nil`.
     MPPObjectDetector *__weak weakSelf = self;
     dispatch_queue_t callbackQueue =
-        dispatch_queue_create([MPPVisionTaskRunner uniqueQueueNameWithTaskName:kTaskName], NULL);
+        dispatch_queue_create([MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL);
     packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
       if (!weakSelf) {
         return;
       }
-      if (![weakSelf.objectDetectorDelegate
+      if (![weakSelf.objectDetectorLiveStreamDelegate
               respondsToSelector:@selector
               (objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
         return;
@@ -101,10 +100,10 @@ static NSString *const kTaskName = @"objectDetector";
       NSError *callbackError = nil;
       if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
         dispatch_async(callbackQueue, ^{
-          [weakSelf.objectDetectorDelegate objectDetector:weakSelf
-                              didFinishDetectionWithResult:nil
-                                   timestampInMilliseconds:Timestamp::Unset().Value()
-                                                     error:callbackError];
+          [weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
+                                       didFinishDetectionWithResult:nil
+                                            timestampInMilliseconds:Timestamp::Unset().Value()
+                                                              error:callbackError];
         });
         return;
       }
@@ -122,10 +121,10 @@ static NSString *const kTaskName = @"objectDetector";
           outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
           kMicroSecondsPerMilliSecond;
       dispatch_async(callbackQueue, ^{
-        [weakSelf.objectDetectorDelegate objectDetector:weakSelf
-                            didFinishDetectionWithResult:result
-                                 timestampInMilliseconds:timeStampInMilliseconds
-                                                   error:callbackError];
+        [weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
+                                     didFinishDetectionWithResult:result
+                                          timestampInMilliseconds:timeStampInMilliseconds
+                                                            error:callbackError];
       });
     };
   }
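The `.mm` changes keep the pre-existing callback pattern intact: the C++ packets callback captures only a weak reference to the detector, so deallocating the detector stops delegate callbacks instead of extending the object's lifetime, and every delegate notification is funneled through one private serial dispatch queue. Stripped of the task-runner details, the same pattern looks like the following sketch; every name here is illustrative and not part of the patch.

#import <Foundation/Foundation.h>

// Illustrative stand-ins for the detector and its live stream delegate.
@protocol FrameProcessorDelegate <NSObject>
- (void)frameProcessor:(id)processor didProcessFrameAtTimestamp:(NSInteger)timestampMs;
@end

@interface FrameProcessor : NSObject
@property(nonatomic, weak, nullable) id<FrameProcessorDelegate> delegate;
@property(nonatomic, copy) void (^onResult)(NSInteger timestampMs);
@end

@implementation FrameProcessor

- (void)setUpCallback {
  FrameProcessor *__weak weakSelf = self;
  // A private serial queue keeps delegate notifications ordered.
  dispatch_queue_t callbackQueue =
      dispatch_queue_create("com.example.frame_processor.callback", NULL);
  self.onResult = ^(NSInteger timestampMs) {
    if (!weakSelf) {
      return;  // Owner already deallocated; drop the result.
    }
    dispatch_async(callbackQueue, ^{
      // `weakSelf` may become nil before this block runs; messaging nil is a
      // harmless no-op in Objective-C, which is what the pattern relies on.
      [weakSelf.delegate frameProcessor:weakSelf didProcessFrameAtTimestamp:timestampMs];
    });
  };
}

@end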
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h
index 547fd5ed0..c60c8acac 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h
@@ -23,15 +23,15 @@ NS_ASSUME_NONNULL_BEGIN
 @class MPPObjectDetector;

 /**
- * This protocol defines an interface for the delegates of `MPPImageClassifier` object to receive
+ * This protocol defines an interface for the delegates of `MPPObjectDetector` object to receive
 * results of performing asynchronous object detection on images
 * (i.e, when `runningMode` = `MPPRunningModeLiveStream`).
 *
- * The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierDelegate` protocol.
+ * The delegate of `MPPObjectDetector` must adopt the `MPPObjectDetectorLiveStreamDelegate` protocol.
 * The methods in this protocol are optional.
 */
-NS_SWIFT_NAME(ObjectDetectorDelegate)
-@protocol MPPObjectDetectorDelegate <NSObject>
+NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
+@protocol MPPObjectDetectorLiveStreamDelegate <NSObject>

 @optional

@@ -77,12 +77,13 @@ NS_SWIFT_NAME(ObjectDetectorOptions)
 @property(nonatomic) MPPRunningMode runningMode;

 /**
- * An object that confirms to `MPPObjectDetectorDelegate` protocol. This object must implement
- * `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:`
- * to receive the results of performing asynchronous object detection on images (i.e, when
- * `runningMode` = `MPPRunningModeLiveStream`).
+ * An object that conforms to the `MPPObjectDetectorLiveStreamDelegate` protocol. This object must
+ * implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
+ * the results of performing asynchronous object detection on images (i.e, when `runningMode` =
+ * `MPPRunningModeLiveStream`).
 */
-@property(nonatomic, weak, nullable) id<MPPObjectDetectorDelegate> objectDetectorDelegate;
+@property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
+    objectDetectorLiveStreamDelegate;

 /**
 * The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m
index 1990a28ec..b93a6b30b 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m
@@ -33,7 +33,7 @@
   objectDetectorOptions.categoryDenylist = self.categoryDenylist;
   objectDetectorOptions.categoryAllowlist = self.categoryAllowlist;
   objectDetectorOptions.displayNamesLocale = self.displayNamesLocale;
-  objectDetectorOptions.objectDetectorDelegate = self.objectDetectorDelegate;
+  objectDetectorOptions.objectDetectorLiveStreamDelegate = self.objectDetectorLiveStreamDelegate;

   return objectDetectorOptions;
 }
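Taken together, the renamed surface is wired up as in the sketch below. `ObjectDetectionListener` is the hypothetical class from the first sketch, the model path is a placeholder, and `baseOptions.modelAssetPath` is assumed from the shared MPPTaskOptions base class rather than shown in this patch.

// Configure live stream mode with the renamed delegate property.
MPPObjectDetectorOptions *options = [[MPPObjectDetectorOptions alloc] init];
options.baseOptions.modelAssetPath = @"/path/to/model.tflite";  // placeholder path
options.runningMode = MPPRunningModeLiveStream;
options.maxResults = 5;

// The property is declared weak, so the caller must keep the listener alive
// for as long as detection runs.
ObjectDetectionListener *listener = [[ObjectDetectionListener alloc] init];
options.objectDetectorLiveStreamDelegate = listener;

NSError *error = nil;
MPPObjectDetector *detector = [[MPPObjectDetector alloc] initWithOptions:options error:&error];
if (!detector) {
  NSLog(@"Could not create object detector: %@", error);
}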