Merge pull request #4743 from priankakariatyml:ios-vision-task-runner-refactor-impl
PiperOrigin-RevId: 561796747
commit 827c2983bd
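This change swaps the `MPPVisionTaskRunner` dependency for `MPPVisionTaskRunnerRefactored` in the face detector and image classifier: packet creation, NormalizedRect computation, and output unwrapping move out of each task implementation and into the runner. A minimal sketch of the runner surface exercised by this diff follows; the stream-name values, `taskInfo`, and the surrounding setup are assumptions, not part of the commit.

// Sketch only. The calls below mirror what appears in the hunks; it is not a
// verbatim excerpt, and the constant values are placeholders.
#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"

using ::mediapipe::tasks::core::PacketMap;

static NSString *const kImageInStreamName = @"image_in";       // assumed value
static NSString *const kNormRectStreamName = @"norm_rect_in";  // assumed value

- (void)sketchWithTaskInfo:(MPPTaskInfo *)taskInfo image:(MPPImage *)image error:(NSError **)error {
  // One runner per task; ROI support and the input stream names are declared up front.
  MPPVisionTaskRunner *runner =
      [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
                                        runningMode:MPPRunningModeImage
                                         roiAllowed:NO
                                    packetsCallback:nullptr  // only set for LIVE_STREAM mode
                               imageInputStreamName:kImageInStreamName
                            normRectInputStreamName:kNormRectStreamName
                                              error:error];

  // IMAGE mode: a single call replaces the old rect/packet-map plumbing in each task.
  std::optional<PacketMap> outputPacketMap = [runner processImage:image error:error];

  // VIDEO mode: the timestamp is forwarded to the runner.
  outputPacketMap = [runner processVideoFrame:image timestampInMilliseconds:0 error:error];

  // LIVE_STREAM mode: results are delivered through the packets callback.
  BOOL queued = [runner processLiveStreamImage:image timestampInMilliseconds:0 error:error];
  (void)queued;
  (void)outputPacketMap;
}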
@@ -55,7 +55,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/face_detector/utils:MPPFaceDetectorOptionsHelpers",
         "//mediapipe/tasks/ios/vision/face_detector/utils:MPPFaceDetectorResultHelpers",
     ],
@@ -18,12 +18,10 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/face_detector/utils/sources/MPPFaceDetectorOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/face_detector/utils/sources/MPPFaceDetectorResult+Helpers.h"
 
-using ::mediapipe::NormalizedRect;
-using ::mediapipe::Packet;
 using ::mediapipe::Timestamp;
 using ::mediapipe::tasks::core::PacketMap;
 using ::mediapipe::tasks::core::PacketsCallback;
@@ -49,6 +47,12 @@ static NSString *const kTaskName = @"faceDetector";
   }                                                                 \
   }
 
+#define FaceDetectorResultWithOutputPacketMap(outputPacketMap)                                     \
+  (                                                                                                \
+      [MPPFaceDetectorResult                                                                       \
+          faceDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]] \
+  )
+
 @interface MPPFaceDetector () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -102,10 +106,12 @@ static NSString *const kTaskName = @"faceDetector";
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
-                                                 packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:NO
+                                                    packetsCallback:std::move(packetsCallback)
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectStreamName
+                                                              error:error];
 
   if (!_visionTaskRunner) {
@@ -124,95 +130,29 @@ static NSString *const kTaskName = @"faceDetector";
   return [self initWithOptions:options error:error];
 }
 
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
 - (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceDetectorResult
-      faceDetectorResultWithDetectionsPacket:outputPacketMap
-                                                 .value()[kDetectionsStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                  error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
-
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceDetectorResult
-      faceDetectorResultWithDetectionsPacket:outputPacketMap
-                                                 .value()[kDetectionsStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }
 
 - (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
@@ -237,9 +177,7 @@ static NSString *const kTaskName = @"faceDetector";
     return;
   }
 
-  MPPFaceDetectorResult *result = [MPPFaceDetectorResult
-      faceDetectorResultWithDetectionsPacket:liveStreamResult
-                                                 .value()[kDetectionsStreamName.cppString]];
+  MPPFaceDetectorResult *result = FaceDetectorResultWithOutputPacketMap(liveStreamResult.value());
 
   NSInteger timeStampInMilliseconds =
       outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
@@ -252,4 +190,13 @@ static NSString *const kTaskName = @"faceDetector";
   });
 }
 
++ (nullable MPPFaceDetectorResult *)faceDetectorResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap>)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return FaceDetectorResultWithOutputPacketMap(outputPacketMap.value());
+}
+
 @end
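For reference, the public `MPPFaceDetector` entry points touched above keep their signatures; only the internals now delegate to the refactored runner. A hedged caller-side sketch follows; the options configuration (model asset path and running mode) is assumed, and each call requires a detector created in the matching running mode.

// Caller-side sketch; these detector methods are the ones rewritten in the hunks above.
NSError *error = nil;
MPPImage *image = /* an MPPImage built from a UIImage or pixel buffer */ nil;
MPPFaceDetectorOptions *options = [[MPPFaceDetectorOptions alloc] init];  // model path etc. assumed configured
MPPFaceDetector *detector = [[MPPFaceDetector alloc] initWithOptions:options error:&error];

// IMAGE running mode.
MPPFaceDetectorResult *result = [detector detectInImage:image error:&error];

// VIDEO running mode (a detector configured for video is assumed).
MPPFaceDetectorResult *frameResult = [detector detectInVideoFrame:image
                                           timestampInMilliseconds:0
                                                             error:&error];

// LIVE_STREAM running mode; results arrive on the delegate.
BOOL queued = [detector detectAsyncInImage:image timestampInMilliseconds:0 error:&error];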
@@ -57,7 +57,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
         "//mediapipe/tasks/ios/vision/image_classifier/utils:MPPImageClassifierOptionsHelpers",
         "//mediapipe/tasks/ios/vision/image_classifier/utils:MPPImageClassifierResultHelpers",
     ],
@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.h"
 
@@ -52,6 +52,13 @@ static const int kMicroSecondsPerMilliSecond = 1000;
   }                                                                 \
   }
 
+#define ImageClassifierResultWithOutputPacketMap(outputPacketMap)                                   \
+  (                                                                                                 \
+      [MPPImageClassifierResult                                                                     \
+          imageClassifierResultWithClassificationsPacket:outputPacketMap[kClassificationsStreamName \
+                                                                             .cppString]]           \
+  )
+
 @interface MPPImageClassifier () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -63,43 +70,7 @@ static const int kMicroSecondsPerMilliSecond = 1000;
 
 @implementation MPPImageClassifier
 
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.imageClassifierLiveStreamDelegate
-          respondsToSelector:@selector
-          (imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.imageClassifierLiveStreamDelegate imageClassifier:self
-                            didFinishClassificationWithResult:nil
-                                      timestampInMilliseconds:Timestamp::Unset().Value()
-                                                        error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPImageClassifierResult *result = [MPPImageClassifierResult
-      imageClassifierResultWithClassificationsPacket:outputPacketMap[kClassificationsStreamName
-                                                                         .cppString]];
-
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-  dispatch_async(_callbackQueue, ^{
-    [self.imageClassifierLiveStreamDelegate imageClassifier:self
-                          didFinishClassificationWithResult:result
-                                    timestampInMilliseconds:timeStampInMilliseconds
-                                                      error:callbackError];
-  });
-}
-
+#pragma mark - Public
 
 - (instancetype)initWithOptions:(MPPImageClassifierOptions *)options error:(NSError **)error {
   self = [super init];
@@ -143,10 +114,12 @@ static const int kMicroSecondsPerMilliSecond = 1000;
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
-                                                 packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:YES
+                                                    packetsCallback:std::move(packetsCallback)
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectStreamName
+                                                              error:error];
 
   if (!_visionTaskRunner) {
@@ -167,90 +140,28 @@ static const int kMicroSecondsPerMilliSecond = 1000;
 - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image
                                      regionOfInterest:(CGRect)roi
                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithRegionOfInterest:roi
-                                            imageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return
-      [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket:
-                                    outputPacketMap.value()[kClassificationsStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image
+                                                             regionOfInterest:roi
+                                                                        error:error];
+
+  return [MPPImageClassifier imageClassifierResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image error:(NSError **)error {
   return [self classifyImage:image regionOfInterest:CGRectZero error:error];
 }
 
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                       regionOfInterest:(CGRect)roi
-                                                  error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithRegionOfInterest:roi
-                                            imageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
                                    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                             regionOfInterest:roi
-                                                                        error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
-  std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
-
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return
-      [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket:
-                                    outputPacketMap.value()[kClassificationsStreamName.cppString]];
+  std::optional<PacketMap> outputPacketMap =
+      [_visionTaskRunner processVideoFrame:image
+                          regionOfInterest:roi
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPImageClassifier imageClassifierResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
@@ -266,15 +177,10 @@ static const int kMicroSecondsPerMilliSecond = 1000;
                    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                           regionOfInterest:(CGRect)roi
                                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                             regionOfInterest:roi
-                                                                        error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                                  regionOfInterest:roi
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }
 
 - (BOOL)classifyAsyncImage:(MPPImage *)image
@@ -286,4 +192,51 @@ static const int kMicroSecondsPerMilliSecond = 1000;
                      error:error];
 }
 
+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.imageClassifierLiveStreamDelegate
+          respondsToSelector:@selector
+          (imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.imageClassifierLiveStreamDelegate imageClassifier:self
+                            didFinishClassificationWithResult:nil
+                                      timestampInMilliseconds:Timestamp::Unset().Value()
+                                                        error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPImageClassifierResult *result = ImageClassifierResultWithOutputPacketMap(outputPacketMap);
+
+  NSInteger timeStampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+  dispatch_async(_callbackQueue, ^{
+    [self.imageClassifierLiveStreamDelegate imageClassifier:self
+                          didFinishClassificationWithResult:result
+                                    timestampInMilliseconds:timeStampInMilliseconds
+                                                      error:callbackError];
+  });
+}
+
++ (nullable MPPImageClassifierResult *)imageClassifierResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap>)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return ImageClassifierResultWithOutputPacketMap(outputPacketMap.value());
+}
+
 @end