diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h index 9086ba7ac..024eee0aa 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h @@ -166,7 +166,7 @@ NS_SWIFT_NAME(ImageClassifier) * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeLiveStream`. * The object which needs to be continuously notified of the available results of image - * classification must confirm to `MPPImageClassifierDelegate` protocol and implement the + * classification must conform to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` * delegate method. * @@ -193,7 +193,7 @@ NS_SWIFT_NAME(ImageClassifier) * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPPRunningModeLiveStream`. * The object which needs to be continuously notified of the available results of image - * classification must confirm to `MPPImageClassifierDelegate` protocol and implement the + * classification must conform to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate * method. 
* diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm index 14b95bed1..408153c01 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm @@ -54,14 +54,14 @@ static NSString *const kTaskName = @"imageClassifier"; /** iOS Vision Task Runner */ MPPVisionTaskRunner *_visionTaskRunner; } -@property(nonatomic, weak) id imageClassifierDelegate; +@property(nonatomic, weak) id + imageClassifierLiveStreamDelegate; @end @implementation MPPImageClassifier - (instancetype)initWithOptions:(MPPImageClassifierOptions *)options error:(NSError **)error { self = [super init]; - NSLog(@"Image Classifier Initializing with dispatch queu and weak self"); if (self) { MPPTaskInfo *taskInfo = [[MPPTaskInfo alloc] initWithTaskGraphName:kTaskGraphName @@ -84,9 +84,8 @@ static NSString *const kTaskName = @"imageClassifier"; PacketsCallback packetsCallback = nullptr; - if (options.imageClassifierDelegate) { - _imageClassifierDelegate = options.imageClassifierDelegate; - + if (options.imageClassifierLiveStreamDelegate) { + _imageClassifierLiveStreamDelegate = options.imageClassifierLiveStreamDelegate; // Capturing `self` as weak in order to avoid `self` being kept in memory // and cause a retain cycle, after self is set to `nil`. MPPImageClassifier *__weak weakSelf = self; @@ -95,13 +94,13 @@ static NSString *const kTaskName = @"imageClassifier"; // asynchronously. This is to ensure that if the client performs a long running operation in // the delegate method, the queue on which the C++ callbacks is invoked is not blocked and is // freed up to continue with its operations. 
- const char *queueName = [MPPVisionTaskRunner uniqueQueueNameWithTaskName:kTaskName]; + const char *queueName = [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName]; dispatch_queue_t callbackQueue = dispatch_queue_create(queueName, NULL); packetsCallback = [=](absl::StatusOr status_or_packets) { if (!weakSelf) { return; } - if (![weakSelf.imageClassifierDelegate + if (![weakSelf.imageClassifierLiveStreamDelegate respondsToSelector:@selector (imageClassifier: didFinishClassificationWithResult:timestampInMilliseconds:error:)]) { @@ -111,10 +110,10 @@ static NSString *const kTaskName = @"imageClassifier"; NSError *callbackError = nil; if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) { dispatch_async(callbackQueue, ^{ - [weakSelf.imageClassifierDelegate imageClassifier:weakSelf - didFinishClassificationWithResult:nil - timestampInMilliseconds:Timestamp::Unset().Value() - error:callbackError]; + [weakSelf.imageClassifierLiveStreamDelegate imageClassifier:weakSelf + didFinishClassificationWithResult:nil + timestampInMilliseconds:Timestamp::Unset().Value() + error:callbackError]; }); return; } @@ -132,10 +131,10 @@ static NSString *const kTaskName = @"imageClassifier"; outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / kMicroSecondsPerMilliSecond; dispatch_async(callbackQueue, ^{ - [weakSelf.imageClassifierDelegate imageClassifier:weakSelf - didFinishClassificationWithResult:result - timestampInMilliseconds:timeStampInMilliseconds - error:callbackError]; + [weakSelf.imageClassifierLiveStreamDelegate imageClassifier:weakSelf + didFinishClassificationWithResult:result + timestampInMilliseconds:timeStampInMilliseconds + error:callbackError]; }); }; } diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h index 84f6ff07b..fc76560c2 100644 --- 
a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h @@ -27,11 +27,11 @@ NS_ASSUME_NONNULL_BEGIN * results of asynchronous classification of images * (i.e, when `runningMode = MPPRunningModeLiveStream`). * - * The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierDelegate` protocol. + * The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ -NS_SWIFT_NAME(ImageClassifierDelegate) -@protocol MPPImageClassifierDelegate +NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) +@protocol MPPImageClassifierLiveStreamDelegate @optional /** @@ -75,12 +75,13 @@ NS_SWIFT_NAME(ImageClassifierOptions) @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPImageClassifierDelegate` protocol. This object must implement - * `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` - * to receive the results of asynchronous classification on images (i.e, when `runningMode = + * An object that conforms to `MPPImageClassifierLiveStreamDelegate` protocol. This object must + * implement `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` + * to receive the results of asynchronous classification on images (i.e, when `runningMode = + * MPPRunningModeLiveStream`). */ -@property(nonatomic, weak, nullable) id imageClassifierDelegate; +@property(nonatomic, weak, nullable) id + imageClassifierLiveStreamDelegate; /** * The locale to use for display names specified through the TFLite Model Metadata, if any. 
Defaults diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m index 2b50ae209..8d3815ff3 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m @@ -33,7 +33,7 @@ imageClassifierOptions.categoryDenylist = self.categoryDenylist; imageClassifierOptions.categoryAllowlist = self.categoryAllowlist; imageClassifierOptions.displayNamesLocale = self.displayNamesLocale; - imageClassifierOptions.imageClassifierDelegate = self.imageClassifierDelegate; + imageClassifierOptions.imageClassifierLiveStreamDelegate = self.imageClassifierLiveStreamDelegate; return imageClassifierOptions; }