Added condition check for delegates

This commit is contained in:
Prianka Liz Kariat 2023-05-02 08:45:08 +05:30
parent c9b00b07e0
commit 82a238a0c8
4 changed files with 63 additions and 45 deletions

View File

@ -164,9 +164,9 @@ NS_SWIFT_NAME(ImageClassifier)
* Sends live stream image data of type `MPPImage` to perform image classification using the whole * Sends live stream image data of type `MPPImage` to perform image classification using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`. * `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image * The object which needs to be continuously notified of the available results of image
* classification must conform to `MPPImageClassifierDelegate` protocol and implement the * classification must conform to `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
* delegate method. * delegate method.
* *
@ -188,13 +188,14 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:)); NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
/** /**
* Sends live stream image data of type `MPPImage` to perform image classification, cropped to the * Sends live stream image data of type ``MPPImage`` to perform image classification, cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property * specified region of interest. Rotation will be applied according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`. * `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image * The object which needs to be continuously notified of the available results of image
* classification must conform to `MPPImageClassifierDelegate` protocol and implement the * classification must conform to `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate method. * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate
* method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing. * to the image classifier. The input timestamps must be monotonically increasing.

View File

@ -88,10 +88,15 @@ static NSString *const kTaskGraphName =
packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) { packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) {
NSError *callbackError = nil; NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) { if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
[_imageClassifierDelegate imageClassifier:self if ([_imageClassifierDelegate
didFinishClassificationWithResult:nil respondsToSelector:@selector
timestampInMilliseconds:Timestamp::Unset().Value() (imageClassifier:
error:callbackError]; didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
}
return; return;
} }
@ -104,13 +109,18 @@ static NSString *const kTaskGraphName =
[MPPImageClassifierResult imageClassifierResultWithClassificationsPacket: [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket:
outputPacketMap[kClassificationsStreamName.cppString]]; outputPacketMap[kClassificationsStreamName.cppString]];
[_imageClassifierDelegate imageClassifier:self if ([_imageClassifierDelegate
didFinishClassificationWithResult:result respondsToSelector:@selector
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString] (imageClassifier:
.Timestamp() didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
.Value() / [_imageClassifierDelegate imageClassifier:self
kMicroSecondsPerMilliSecond didFinishClassificationWithResult:result
error:callbackError]; timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
}
}; };
} }

View File

@ -137,8 +137,8 @@ NS_SWIFT_NAME(ObjectDetector)
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPObjectDetectorOptions`. * provided in the `MPPObjectDetectorOptions`.
* The object which needs to be continuously notified of the available results of object * The object which needs to be continuously notified of the available results of object
* detection must conform to `MPPObjectDetectorDelegate` protocol and implement the * detection must conform to `MPPObjectDetectorDelegate` protocol and implement the
* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. * `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent

View File

@ -84,10 +84,14 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) { packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
NSError *callbackError = nil; NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) { if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
[_objectDetectorDelegate objectDetector:self if ([_objectDetectorDelegate
didFinishDetectionWithResult:nil respondsToSelector:@selector
timestampInMilliseconds:Timestamp::Unset().Value() (objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
error:callbackError]; [_objectDetectorDelegate objectDetector:self
didFinishDetectionWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
}
return; return;
} }
@ -99,29 +103,32 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
MPPObjectDetectionResult *result = [MPPObjectDetectionResult MPPObjectDetectionResult *result = [MPPObjectDetectionResult
objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName
.cppString]]; .cppString]];
if ([_objectDetectorDelegate
respondsToSelector:@selector
(objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
[_objectDetectorDelegate objectDetector:self
didFinishDetectionWithResult:result
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
}
};
}
[_objectDetectorDelegate objectDetector:self _visionTaskRunner =
didFinishDetectionWithResult:result [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString] runningMode:options.runningMode
.Timestamp() packetsCallback:std::move(packetsCallback)
.Value() / error:error];
kMicroSecondsPerMilliSecond
error:callbackError]; if (!_visionTaskRunner) {
} return nil;
}; }
} }
_visionTaskRunner = return self;
[[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
runningMode:options.runningMode
packetsCallback:std::move(packetsCallback)
error:error];
if (!_visionTaskRunner) {
return nil;
}
}
return self;
} }
- (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error { - (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error {