From 82a238a0c88c3385e176ec066ca76e1e6d892db1 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 2 May 2023 08:45:08 +0530
Subject: [PATCH] Added condition check for delegates

---
 .../sources/MPPImageClassifier.h  | 17 +++---
 .../sources/MPPImageClassifier.mm | 32 +++++++----
 .../sources/MPPObjectDetector.h   |  4 +-
 .../sources/MPPObjectDetector.mm  | 55 +++++++++++--------
 4 files changed, 63 insertions(+), 45 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h
index 7cf1f9148..9086ba7ac 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h
+++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h
@@ -164,9 +164,9 @@ NS_SWIFT_NAME(ImageClassifier)
  * Sends live stream image data of type `MPPImage` to perform image classification using the whole
  * image as region of interest. Rotation will be applied according to the `orientation` property of
  * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
- * `MPPRunningModeLiveStream`.
- * The object which needs to be continuously notified of the available results of image
- * classification must confirm to `MPPImageClassifierDelegate` protocol and implement the
+ * `MPPRunningModeLiveStream`.
+ * The object which needs to be continuously notified of the available results of image
+ * classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
  * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
  * delegate method.
  *
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
  * to the image classifier. The input timestamps must be monotonically increasing.
@@ -188,13 +188,14 @@
     NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
 
 /**
  * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
  * specified region of interest. Rotation will be applied according to the `orientation` property
  * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
- * `MPPRunningModeLiveStream`.
- * The object which needs to be continuously notified of the available results of image
- * classification must confirm to `MPPImageClassifierDelegate` protocol and implement the
- * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate method.
+ * `MPPRunningModeLiveStream`.
+ * The object which needs to be continuously notified of the available results of image
+ * classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
+ * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate
+ * method.
  *
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
  * to the image classifier. The input timestamps must be monotonically increasing.
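For context, a receiver of the live-stream callbacks documented in this header looks roughly like the sketch below. This is an illustrative sketch only, not part of the patch: the protocol and delegate-method selectors come from the header comments above, the Objective-C name of the classify call is inferred from its NS_SWIFT_NAME, and the class name, property, and parameter types are assumptions.

#import "mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h"

// Hypothetical client that conforms to MPPImageClassifierDelegate and feeds
// monotonically increasing frame timestamps to the classifier.
@interface MyClassifierListener : NSObject <MPPImageClassifierDelegate>
@property(nonatomic, strong) MPPImageClassifier *imageClassifier;
@end

@implementation MyClassifierListener

// The delegate method named in the header comment above. When classification
// of a frame fails, `result` is nil and `error` carries the failure.
- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
    didFinishClassificationWithResult:(MPPImageClassifierResult *)result
              timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                error:(NSError *)error {
  // Handle the asynchronously delivered result here.
}

// Sends one live-stream frame; timestamps must be monotonically increasing.
- (void)sendFrame:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestamp {
  NSError *error = nil;
  [self.imageClassifier classifyAsyncImage:image
                   timestampInMilliseconds:timestamp
                                     error:&error];
}

@end

In Swift the same call surfaces as classifyAsync(image:timestampInMilliseconds:), per the NS_SWIFT_NAME annotation above.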
diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm
index aae539d27..45daa6db0 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm
+++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm
@@ -88,10 +88,15 @@ static NSString *const kTaskGraphName =
     packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) {
       NSError *callbackError = nil;
       if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
-        [_imageClassifierDelegate imageClassifier:self
-                didFinishClassificationWithResult:nil
-                          timestampInMilliseconds:Timestamp::Unset().Value()
-                                            error:callbackError];
+        if ([_imageClassifierDelegate
+                respondsToSelector:@selector
+                (imageClassifier:
+                    didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
+          [_imageClassifierDelegate imageClassifier:self
+                  didFinishClassificationWithResult:nil
+                            timestampInMilliseconds:Timestamp::Unset().Value()
+                                              error:callbackError];
+        }
         return;
       }
 
@@ -104,13 +109,18 @@ static NSString *const kTaskGraphName =
       MPPImageClassifierResult *result = [MPPImageClassifierResult
          imageClassifierResultWithClassificationsPacket:
              outputPacketMap[kClassificationsStreamName.cppString]];
 
-      [_imageClassifierDelegate imageClassifier:self
-          didFinishClassificationWithResult:result
-                    timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
-                                                .Timestamp()
-                                                .Value() /
-                                            kMicroSecondsPerMilliSecond
-                                      error:callbackError];
+      if ([_imageClassifierDelegate
+              respondsToSelector:@selector
+              (imageClassifier:
+                  didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
+        [_imageClassifierDelegate imageClassifier:self
+            didFinishClassificationWithResult:result
+                      timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
+                                                  .Timestamp()
+                                                  .Value() /
+                                              kMicroSecondsPerMilliSecond
+                                        error:callbackError];
+      }
     };
   }
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
index f1a40d6ad..44b7064fb 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
@@ -137,8 +137,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
  * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
  * provided in the `MPPObjectDetectorOptions`.
- * The object which needs to be continuously notified of the available results of object
- * detection must confirm to `MPPObjectDetectorDelegate` protocol and implement the
+ * The object which needs to be continuously notified of the available results of object
+ * detection must conform to the `MPPObjectDetectorDelegate` protocol and implement the
  * `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
  *
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
index 8c25092a2..fbac6f00d 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
@@ -84,10 +84,14 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
     packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
       NSError *callbackError = nil;
       if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
-        [_objectDetectorDelegate objectDetector:self
-                   didFinishDetectionWithResult:nil
-                        timestampInMilliseconds:Timestamp::Unset().Value()
-                                          error:callbackError];
+        if ([_objectDetectorDelegate
+                respondsToSelector:@selector
+                (objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
+          [_objectDetectorDelegate objectDetector:self
+                     didFinishDetectionWithResult:nil
+                          timestampInMilliseconds:Timestamp::Unset().Value()
+                                            error:callbackError];
+        }
         return;
       }
 
@@ -99,29 +103,32 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
       MPPObjectDetectionResult *result = [MPPObjectDetectionResult
          objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName
                                                                                .cppString]];
+      if ([_objectDetectorDelegate
+              respondsToSelector:@selector
+              (objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
+        [_objectDetectorDelegate objectDetector:self
+                    didFinishDetectionWithResult:result
+                         timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
+                                                     .Timestamp()
+                                                     .Value() /
+                                                 kMicroSecondsPerMilliSecond
+                                           error:callbackError];
+      }
+    };
+  }
 
-      [_objectDetectorDelegate objectDetector:self
-                  didFinishDetectionWithResult:result
-                       timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
-                                                   .Timestamp()
-                                                   .Value() /
-                                               kMicroSecondsPerMilliSecond
-                                         error:callbackError];
-    }
-  };
+  _visionTaskRunner =
+      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
+                                                     runningMode:options.runningMode
+                                                 packetsCallback:std::move(packetsCallback)
+                                                           error:error];
+
+  if (!_visionTaskRunner) {
+    return nil;
+  }
 }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
-                                                 packetsCallback:std::move(packetsCallback)
-                                                           error:error];
-
-  if (!_visionTaskRunner) {
-    return nil;
-  }
-}
-return self;
+  return self;
 }
 
 - (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error {
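The guard this patch adds around every delegate invocation is the standard Objective-C check for optional delegate methods. A minimal, self-contained sketch of the pattern follows; the protocol and names here are hypothetical, not from the patch:

#import <Foundation/Foundation.h>

// Hypothetical delegate protocol with an @optional callback, mirroring the
// delegate methods guarded in the .mm changes above.
@protocol DemoTaskDelegate <NSObject>
@optional
- (void)demoTask:(id)task didFinishWithResult:(id)result error:(NSError *)error;
@end

// Messaging an @optional method the delegate does not implement would crash
// with "unrecognized selector sent to instance"; checking respondsToSelector:
// first turns the callback into a safe no-op instead.
static void NotifyDelegate(id<DemoTaskDelegate> delegate, id task, id result, NSError *error) {
  if ([delegate respondsToSelector:@selector(demoTask:didFinishWithResult:error:)]) {
    [delegate demoTask:task didFinishWithResult:result error:error];
  }
}

Both MPPImageClassifier.mm and MPPObjectDetector.mm apply exactly this check before each of their two delegate calls.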