Added a method for processing live stream results in callback for iOS gesture recognizer

Prianka Liz Kariat 2023-05-25 19:56:29 +05:30
parent b16905e362
commit 58772a73fc
2 changed files with 43 additions and 43 deletions
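For context on what the new callback path feeds into: a client adopts MPPGestureRecognizerLiveStreamDelegate and receives results through the delegate method named in this diff. The sketch below is illustrative only; the handler class name and the logging bodies are made up here, and anything not shown in the diff is an assumption rather than part of this change.

// Illustrative sketch of a client-side delegate for the live-stream callback
// that this commit refactors. Only the protocol and the delegate selector
// below appear in the diff; the class name and logging are hypothetical.
@interface GestureResultHandler : NSObject <MPPGestureRecognizerLiveStreamDelegate>
@end

@implementation GestureResultHandler

- (void)gestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer
    didFinishRecognitionWithResult:(MPPGestureRecognizerResult *)result
           timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                             error:(NSError *)error {
  // Invoked on the recognizer's private serial callback queue; hop to the
  // main queue before touching UI.
  if (error) {
    NSLog(@"Gesture recognition failed: %@", error);
    return;
  }
  NSLog(@"Received result at %ld ms: %@", (long)timestampInMilliseconds, result);
}

@end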

View File

@@ -41,7 +41,7 @@ using ClassificationProto = ::mediapipe::Classification;
 }

 + (MPPCategory *)categoryWithProto:(const ClassificationProto &)classificationProto {
-  return [MPPCategory categoryWithProto:classificationProto andIndex:classificationProto.index()];
+  return [MPPCategory categoryWithProto:classificationProto index:classificationProto.index()];
 }

 @end

View File

@@ -57,6 +57,7 @@ static NSString *const kTaskName = @"gestureRecognizer";
 @interface MPPGestureRecognizer () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
+  dispatch_queue_t _callbackQueue;
 }
 @property(nonatomic, weak) id<MPPGestureRecognizerLiveStreamDelegate>
     gestureRecognizerLiveStreamDelegate;
@@ -77,6 +78,44 @@ static NSString *const kTaskName = @"gestureRecognizer";
                       .cppString]];
 }

+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  MPPGestureRecognizer *__weak weakSelf = self;
+  if (![weakSelf.gestureRecognizerLiveStreamDelegate
+          respondsToSelector:@selector(gestureRecognizer:
+                                 didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [weakSelf.gestureRecognizerLiveStreamDelegate gestureRecognizer:weakSelf
+                                        didFinishRecognitionWithResult:nil
+                                               timestampInMilliseconds:Timestamp::Unset().Value()
+                                                                 error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPGestureRecognizerResult *result =
+      [weakSelf gestureRecognizerResultWithOutputPacketMap:outputPacketMap];
+  NSInteger timeStampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+
+  dispatch_async(_callbackQueue, ^{
+    [weakSelf.gestureRecognizerLiveStreamDelegate gestureRecognizer:weakSelf
+                                      didFinishRecognitionWithResult:result
+                                             timestampInMilliseconds:timeStampInMilliseconds
+                                                               error:callbackError];
+  });
+}
+
 - (instancetype)initWithOptions:(MPPGestureRecognizerOptions *)options error:(NSError **)error {
   self = [super init];
   if (self) {
@@ -116,47 +155,9 @@ static NSString *const kTaskName = @"gestureRecognizer";
       // the delegate method, the queue on which the C++ callbacks is invoked is not blocked and is
       // freed up to continue with its operations.
       const char *queueName = [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName];
-      dispatch_queue_t callbackQueue = dispatch_queue_create(queueName, NULL);
-      packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) {
-        if (!weakSelf) {
-          return;
-        }
-
-        if (![weakSelf.gestureRecognizerLiveStreamDelegate
-                respondsToSelector:@selector
-                (gestureRecognizer:
-                    didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
-          return;
-        }
-
-        NSError *callbackError = nil;
-        if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
-          dispatch_async(callbackQueue, ^{
-            [weakSelf.gestureRecognizerLiveStreamDelegate
-                          gestureRecognizer:weakSelf
-             didFinishRecognitionWithResult:nil
-                    timestampInMilliseconds:Timestamp::Unset().Value()
-                                      error:callbackError];
-          });
-          return;
-        }
-
-        PacketMap &outputPacketMap = status_or_packets.value();
-        if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-          return;
-        }
-
-        MPPGestureRecognizerResult *result =
-            [weakSelf gestureRecognizerResultWithOutputPacketMap:outputPacketMap];
-        NSInteger timeStampInMilliseconds =
-            outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-            kMicroSecondsPerMilliSecond;
-
-        dispatch_async(callbackQueue, ^{
-          [weakSelf.gestureRecognizerLiveStreamDelegate gestureRecognizer:weakSelf
-                                           didFinishRecognitionWithResult:result
-                                                  timestampInMilliseconds:timeStampInMilliseconds
-                                                                    error:callbackError];
-        });
+      _callbackQueue = dispatch_queue_create(queueName, NULL);
+      packetsCallback = [=](absl::StatusOr<PacketMap> liveStreamResult) {
+        [weakSelf processLiveStreamResult:liveStreamResult];
       };
     }
@@ -165,7 +166,6 @@ static NSString *const kTaskName = @"gestureRecognizer";
                                                  runningMode:options.runningMode
                                              packetsCallback:std::move(packetsCallback)
                                                        error:error];
-
   if (!_visionTaskRunner) {
     return nil;
   }