Added condition check for delegates

This commit is contained in:
Prianka Liz Kariat 2023-05-02 08:45:08 +05:30
parent c9b00b07e0
commit 82a238a0c8
4 changed files with 63 additions and 45 deletions

View File

@ -164,9 +164,9 @@ NS_SWIFT_NAME(ImageClassifier)
* Sends live stream image data of type `MPPImage` to perform image classification using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image
* classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
* `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image
* classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
* delegate method.
*
@ -188,13 +188,14 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
/**
* Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
* Sends live stream image data of type ``MPPImage`` to perform image classification, cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image
* classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate method.
* `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image
* classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate
* method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing.

View File

@ -88,10 +88,15 @@ static NSString *const kTaskGraphName =
packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) {
NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
if ([_imageClassifierDelegate
respondsToSelector:@selector
(imageClassifier:
didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
}
return;
}
@ -104,13 +109,18 @@ static NSString *const kTaskGraphName =
[MPPImageClassifierResult imageClassifierResultWithClassificationsPacket:
outputPacketMap[kClassificationsStreamName.cppString]];
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:result
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
if ([_imageClassifierDelegate
respondsToSelector:@selector
(imageClassifier:
didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:result
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
}
};
}

View File

@ -137,8 +137,8 @@ NS_SWIFT_NAME(ObjectDetector)
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPObjectDetectorOptions`.
* The object which needs to be continuously notified of the available results of object
* detection must conform to the `MPPObjectDetectorDelegate` protocol and implement the
* The object which needs to be continuously notified of the available results of object
* detection must conform to the `MPPObjectDetectorDelegate` protocol and implement the
* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent

View File

@ -84,10 +84,14 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
[_objectDetectorDelegate objectDetector:self
didFinishDetectionWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
if ([_objectDetectorDelegate
respondsToSelector:@selector
(objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
[_objectDetectorDelegate objectDetector:self
didFinishDetectionWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
}
return;
}
@ -99,29 +103,32 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
MPPObjectDetectionResult *result = [MPPObjectDetectionResult
objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName
.cppString]];
if ([_objectDetectorDelegate
respondsToSelector:@selector
(objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
[_objectDetectorDelegate objectDetector:self
didFinishDetectionWithResult:result
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
}
};
}
[_objectDetectorDelegate objectDetector:self
didFinishDetectionWithResult:result
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
}
};
_visionTaskRunner =
[[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
runningMode:options.runningMode
packetsCallback:std::move(packetsCallback)
error:error];
if (!_visionTaskRunner) {
return nil;
}
}
_visionTaskRunner =
[[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
runningMode:options.runningMode
packetsCallback:std::move(packetsCallback)
error:error];
if (!_visionTaskRunner) {
return nil;
}
}
return self;
return self;
}
- (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error {