Merge pull request #4373 from priankakariatyml:ios-object-detector-async-fixes

PiperOrigin-RevId: 530233608
Copybara-Service 2023-05-08 01:14:01 -07:00
commit 1360977730
13 changed files with 242 additions and 71 deletions

View File

@@ -24,6 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
+ (NSString *)stringWithCppString:(std::string)text;
+ (NSString *)uuidString;
@end
NS_ASSUME_NONNULL_END

View File

@@ -24,4 +24,8 @@
return [NSString stringWithCString:text.c_str() encoding:[NSString defaultCStringEncoding]];
}
+ (NSString *)uuidString {
return [[NSUUID UUID] UUIDString];
}
@end

View File

@@ -28,7 +28,12 @@
return self;
}
-// TODO: Implement hash
+- (NSUInteger)hash {
+NSUInteger nonNullPropertiesHash =
+@(self.location.x).hash ^ @(self.location.y).hash ^ @(self.score).hash;
+return self.label ? nonNullPropertiesHash ^ self.label.hash : nonNullPropertiesHash;
+}
- (BOOL)isEqual:(nullable id)object {
if (!object) {

View File

@@ -453,8 +453,8 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
-@"The vision task is in image or video mode, a "
-@"user-defined result callback should not be provided."
+@"The vision task is in image or video mode. The "
+@"delegate must not be set in the task's options."
}]];
}
}
@@ -470,8 +470,10 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
-@"The vision task is in live stream mode, a "
-@"user-defined result callback must be provided."
+@"The vision task is in live stream mode. An "
+@"object must be set as the delegate of the task "
+@"in its options to ensure asynchronous delivery "
+@"of results."
}]];
}

View File

@@ -25,6 +25,8 @@ static NSDictionary *const kCatsAndDogsRotatedImage =
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
static const float pixelDifferenceTolerance = 10.0f;
static const float scoreDifferenceTolerance = 0.02f;
static NSString *const kLiveStreamTestsDictObjectDetectorKey = @"object_detector";
static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
#define AssertEqualErrors(error, expectedError) \
XCTAssertNotNil(error); \
@@ -58,7 +60,10 @@ static const float scoreDifferenceTolerance = 0.02f;
XCTAssertEqualWithAccuracy(boundingBox.size.height, expectedBoundingBox.size.height, \
pixelDifferenceTolerance, @"index i = %d", idx);
-@interface MPPObjectDetectorTests : XCTestCase
+@interface MPPObjectDetectorTests : XCTestCase <MPPObjectDetectorLiveStreamDelegate> {
+NSDictionary *liveStreamSucceedsTestDict;
+NSDictionary *outOfOrderTimestampTestDict;
+}
@end
@implementation MPPObjectDetectorTests
@@ -446,31 +451,28 @@ static const float scoreDifferenceTolerance = 0.02f;
#pragma mark Running Mode Tests
-- (void)testCreateObjectDetectorFailsWithResultListenerInNonLiveStreamMode {
+- (void)testCreateObjectDetectorFailsWithDelegateInNonLiveStreamMode {
MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo};
for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) {
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = runningModesToTest[i];
-options.completion =
-^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-};
+options.objectDetectorLiveStreamDelegate = self;
[self
assertCreateObjectDetectorWithOptions:options
failsWithExpectedError:
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
-@"The vision task is in image or video mode, a "
-@"user-defined result callback should not be provided."
+@"The vision task is in image or video mode. The "
+@"delegate must not be set in the task's options."
}]];
}
}
-- (void)testCreateObjectDetectorFailsWithMissingResultListenerInLiveStreamMode {
+- (void)testCreateObjectDetectorFailsWithMissingDelegateInLiveStreamMode {
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = MPPRunningModeLiveStream;
@@ -481,8 +483,10 @@ static const float scoreDifferenceTolerance = 0.02f;
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
-@"The vision task is in live stream mode, a "
-@"user-defined result callback must be provided."
+@"The vision task is in live stream mode. An "
+@"object must be set as the delegate of the task "
+@"in its options to ensure asynchronous delivery "
+@"of results."
}]];
}
@@ -563,10 +567,7 @@ static const float scoreDifferenceTolerance = 0.02f;
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = MPPRunningModeLiveStream;
-options.completion =
-^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-};
+options.objectDetectorLiveStreamDelegate = self;
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
@@ -631,23 +632,17 @@ static const float scoreDifferenceTolerance = 0.02f;
options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream;
options.objectDetectorLiveStreamDelegate = self;
XCTestExpectation *expectation = [[XCTestExpectation alloc]
initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"];
expectation.expectedFulfillmentCount = 1;
-options.completion =
-^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-[self assertObjectDetectionResult:result
-isEqualToExpectedResult:
-[MPPObjectDetectorTests
-expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
-timestampInMilliseconds]
-expectedDetectionsCount:maxResults];
-[expectation fulfill];
-};
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
liveStreamSucceedsTestDict = @{
kLiveStreamTestsDictObjectDetectorKey : objectDetector,
kLiveStreamTestsDictExpectationKey : expectation
};
MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
@@ -664,7 +659,9 @@ static const float scoreDifferenceTolerance = 0.02f;
@"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
}];
AssertEqualErrors(error, expectedError);
-[self waitForExpectations:@[ expectation ] timeout:1.0];
+NSTimeInterval timeout = 0.5f;
+[self waitForExpectations:@[ expectation ] timeout:timeout];
}
- (void)testDetectWithLiveStreamModeSucceeds {
@@ -693,19 +690,15 @@ static const float scoreDifferenceTolerance = 0.02f;
expectation.expectedFulfillmentCount = iterationCount + 1;
expectation.inverted = YES;
-options.completion =
-^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-[self assertObjectDetectionResult:result
-isEqualToExpectedResult:
-[MPPObjectDetectorTests
-expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
-timestampInMilliseconds]
-expectedDetectionsCount:maxResults];
-[expectation fulfill];
-};
+options.objectDetectorLiveStreamDelegate = self;
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
liveStreamSucceedsTestDict = @{
kLiveStreamTestsDictObjectDetectorKey : objectDetector,
kLiveStreamTestsDictExpectationKey : expectation
};
// TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
// with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
// `CMSampleBuffer`.
@@ -715,7 +708,28 @@ static const float scoreDifferenceTolerance = 0.02f;
XCTAssertTrue([objectDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
}
-[self waitForExpectations:@[ expectation ] timeout:0.5];
+NSTimeInterval timeout = 0.5f;
+[self waitForExpectations:@[ expectation ] timeout:timeout];
}
#pragma mark MPPObjectDetectorLiveStreamDelegate Methods
- (void)objectDetector:(MPPObjectDetector *)objectDetector
didFinishDetectionWithResult:(MPPObjectDetectionResult *)objectDetectionResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError *)error {
NSInteger maxResults = 4;
[self assertObjectDetectionResult:objectDetectionResult
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
if (objectDetector == outOfOrderTimestampTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
[outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
} else if (objectDetector == liveStreamSucceedsTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
[liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
}
}
@end

View File

@@ -58,6 +58,7 @@ objc_library(
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/tasks/ios/common:MPPCommon",
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/core:MPPTaskRunner", "//mediapipe/tasks/ios/core:MPPTaskRunner",
"//third_party/apple_frameworks:UIKit", "//third_party/apple_frameworks:UIKit",
"@com_google_absl//absl/status:statusor", "@com_google_absl//absl/status:statusor",

View File

@@ -141,6 +141,20 @@ NS_ASSUME_NONNULL_BEGIN
(mediapipe::tasks::core::PacketsCallback)packetsCallback
error:(NSError **)error NS_UNAVAILABLE;
/**
* This method returns a unique dispatch queue name by adding the given suffix and a `UUID` to the
* pre-defined queue name prefix for vision tasks. Vision tasks can use this method to get
* dispatch queue names that are formatted consistently across all vision tasks.
* Dispatch queue names need not be unique, but unique names make debugging easier.
*
* @param suffix A suffix that identifies a dispatch queue's functionality.
*
* @return A unique dispatch queue name formed by adding the given suffix and a `UUID` to the
* pre-defined queue name prefix for vision tasks.
*/
+ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
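For orientation, here is a hedged usage sketch (not itself part of this diff): with the `com.mediapipe.tasks.vision` prefix defined in the implementation file below and a suffix such as `@"objectDetector"`, the helper produces a name of the form `com.mediapipe.tasks.vision.objectDetector_<UUID>`, which a task can pass directly to `dispatch_queue_create`:

// Creates a uniquely named serial queue for delivering a task's callbacks.
dispatch_queue_t callbackQueue = dispatch_queue_create(
    [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:@"objectDetector"], NULL);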

View File

@@ -16,6 +16,7 @@
#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
#include "absl/status/statusor.h" #include "absl/status/statusor.h"
@ -37,6 +38,8 @@ static const NSInteger kMPPOrientationDegreesDown = -180;
/** Rotation degrees for a 90 degree rotation to the left. */ /** Rotation degrees for a 90 degree rotation to the left. */
static const NSInteger kMPPOrientationDegreesLeft = -270; static const NSInteger kMPPOrientationDegreesLeft = -270;
static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision";
@interface MPPVisionTaskRunner () {
MPPRunningMode _runningMode;
}
@@ -54,18 +57,21 @@ static const NSInteger kMPPOrientationDegreesLeft = -270;
if (packetsCallback) {
[MPPCommonUtils createCustomError:error
withCode:MPPTasksErrorCodeInvalidArgumentError
-description:@"The vision task is in image or video mode, a "
-@"user-defined result callback should not be provided."];
+description:@"The vision task is in image or video mode. The "
+@"delegate must not be set in the task's options."];
return nil;
}
break;
}
case MPPRunningModeLiveStream: {
if (!packetsCallback) {
-[MPPCommonUtils createCustomError:error
-withCode:MPPTasksErrorCodeInvalidArgumentError
-description:@"The vision task is in live stream mode, a user-defined "
-@"result callback must be provided."];
+[MPPCommonUtils
+createCustomError:error
+withCode:MPPTasksErrorCodeInvalidArgumentError
+description:
+@"The vision task is in live stream mode. An object must be set as the "
+@"delegate of the task in its options to ensure asynchronous delivery of "
+@"results."];
return nil;
}
break;
@@ -197,4 +203,9 @@ static const NSInteger kMPPOrientationDegreesLeft = -270;
return [self sendPacketMap:packetMap error:error];
}
+ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix {
return [NSString stringWithFormat:@"%@.%@_%@", kTaskPrefix, suffix, [NSString uuidString]]
.UTF8String;
}
@end

View File

@@ -162,6 +162,7 @@ using ::mediapipe::ImageFrame;
OSType pixelBufferFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
switch (pixelBufferFormat) {
case kCVPixelFormatType_32RGBA:
case kCVPixelFormatType_32BGRA: {
return [MPPCVPixelBufferUtils rgbImageFrameFromCVPixelBuffer:pixelBuffer error:error];
}
@@ -169,7 +170,8 @@ using ::mediapipe::ImageFrame;
[MPPCommonUtils createCustomError:error
withCode:MPPTasksErrorCodeInvalidArgumentError
description:@"Unsupported pixel format for CVPixelBuffer. Supported "
-@"pixel format types are kCVPixelFormatType_32BGRA"];
+@"pixel format types are kCVPixelFormatType_32BGRA and "
+@"kCVPixelFormatType_32RGBA"];
}
}

View File

@@ -96,6 +96,15 @@ NS_SWIFT_NAME(ObjectDetector)
* `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeImage`.
*
* This method supports performing object detection on RGBA images. If your `MPPImage` has a
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
* underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space
* is RGB with an alpha channel.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
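As a usage illustration only (not part of this diff; the documented method is assumed to be `detectInImage:error:`, and the model file name, `uiImage` variable, and error handling are assumptions), image-mode detection follows this shape:

MPPObjectDetectorOptions *options = [[MPPObjectDetectorOptions alloc] init];
options.baseOptions.modelAssetPath = @"object_detector.tflite";  // hypothetical model path
options.runningMode = MPPRunningModeImage;

NSError *error = nil;
MPPObjectDetector *detector = [[MPPObjectDetector alloc] initWithOptions:options error:&error];
// `uiImage` is an existing UIImage supplied by the app.
MPPImage *mppImage = [[MPPImage alloc] initWithUIImage:uiImage error:&error];
MPPObjectDetectionResult *result = [detector detectInImage:mppImage error:&error];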
@@ -115,6 +124,15 @@ NS_SWIFT_NAME(ObjectDetector)
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeVideo`.
*
* This method supports performing object detection on RGBA images. If your `MPPImage` has a
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
* underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space
* is RGB with an alpha channel.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
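Similarly, a minimal video-mode sketch (the method is assumed to be `detectInVideoFrame:timestampInMilliseconds:error:`; the frame wrapping and timestamp source are assumptions):

// `detector` was created with MPPRunningModeVideo; `videoFrameImage` wraps one decoded frame.
// Timestamps passed here must be monotonically increasing.
NSError *error = nil;
MPPObjectDetectionResult *result =
    [detector detectInVideoFrame:videoFrameImage
         timestampInMilliseconds:frameTimestampInMilliseconds
                           error:&error];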
@@ -135,12 +153,28 @@ NS_SWIFT_NAME(ObjectDetector)
* Sends live stream image data of type `MPPImage` to perform object detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
-* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
-* provided in the `MPPObjectDetectorOptions`.
+* `MPPRunningModeLiveStream`.
+*
+* The object that needs to be continuously notified of the available results of object detection
+* must conform to the `MPPObjectDetectorLiveStreamDelegate` protocol and implement the
+* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing.
*
* This method supports performing object detection on RGBA images. If your `MPPImage` has a
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
* underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color
* space is RGB with an alpha channel.
*
* If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure
* that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property.
*
* @param image A live stream image data of type `MPPImage` on which object detection is to be
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input

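To make the `AVFoundation` guidance above concrete, here is a hedged sketch (not part of this diff; the capture-session wiring, the owning class, its `objectDetector` property, and the timestamp derivation are assumptions). It requests one of the supported pixel formats listed above and forwards each frame to `detectAsyncInImage:timestampInMilliseconds:error:`:

// In the camera setup code (session and preview wiring omitted):
- (AVCaptureVideoDataOutput *)makeVideoDataOutput {
  AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
  // kCVPixelFormatType_32BGRA is one of the supported pixel formats listed above.
  output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
  [output setSampleBufferDelegate:self queue:dispatch_queue_create("camera.frames", NULL)];
  return output;
}

// AVCaptureVideoDataOutputSampleBufferDelegate callback:
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  MPPImage *image = [[MPPImage alloc] initWithSampleBuffer:sampleBuffer error:nil];
  // Derive a monotonically increasing timestamp from the sample buffer's presentation time.
  NSInteger timestampInMilliseconds =
      (NSInteger)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1000);
  [self.objectDetector detectAsyncInImage:image
                  timestampInMilliseconds:timestampInMilliseconds
                                    error:nil];
}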
View File

@@ -37,8 +37,8 @@ static NSString *const kImageOutStreamName = @"image_out";
static NSString *const kImageTag = @"IMAGE";
static NSString *const kNormRectStreamName = @"norm_rect_in";
static NSString *const kNormRectTag = @"NORM_RECT";
static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorGraph";
static NSString *const kTaskName = @"objectDetector";
#define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \
@@ -51,6 +51,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
/** iOS Vision Task Runner */
MPPVisionTaskRunner *_visionTaskRunner;
}
@property(nonatomic, weak) id<MPPObjectDetectorLiveStreamDelegate> objectDetectorLiveStreamDelegate;
@end
@implementation MPPObjectDetector
@@ -78,11 +79,37 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
PacketsCallback packetsCallback = nullptr;
-if (options.completion) {
+if (options.objectDetectorLiveStreamDelegate) {
_objectDetectorLiveStreamDelegate = options.objectDetectorLiveStreamDelegate;
// `self` is captured weakly to avoid keeping it in memory and causing a retain cycle after
// `self` is set to `nil`.
MPPObjectDetector *__weak weakSelf = self;
// Create a private serial dispatch queue on which the delegate method will be called
// asynchronously. This ensures that if the client performs a long-running operation in the
// delegate method, the queue on which the C++ callbacks are invoked is not blocked and is
// freed up to continue with its operations.
dispatch_queue_t callbackQueue = dispatch_queue_create(
[MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL);
packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
if (!weakSelf) {
return;
}
if (![weakSelf.objectDetectorLiveStreamDelegate
respondsToSelector:@selector
(objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
return;
}
NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
-options.completion(nil, Timestamp::Unset().Value(), callbackError);
+dispatch_async(callbackQueue, ^{
+[weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
+didFinishDetectionWithResult:nil
+timestampInMilliseconds:Timestamp::Unset().Value()
+error:callbackError];
+});
return;
}
@@ -95,10 +122,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName
.cppString]];
-options.completion(result,
-outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-kMicroSecondsPerMilliSecond,
-callbackError);
+NSInteger timeStampInMilliseconds =
+outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+kMicroSecondsPerMilliSecond;
+dispatch_async(callbackQueue, ^{
+[weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
+didFinishDetectionWithResult:result
+timestampInMilliseconds:timeStampInMilliseconds
+error:callbackError];
+});
};
}
@@ -112,6 +144,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return nil;
}
}
return self;
}
@@ -224,5 +257,4 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
}
@end

View File

@@ -20,19 +20,70 @@
NS_ASSUME_NONNULL_BEGIN
@class MPPObjectDetector;
/**
* This protocol defines an interface for the delegates of `MPPObjectDetector` to receive
* results of performing asynchronous object detection on images (i.e., when `runningMode` =
* `MPPRunningModeLiveStream`).
*
* The delegate of `MPPObjectDetector` must adopt the `MPPObjectDetectorLiveStreamDelegate`
* protocol. The methods in this protocol are optional.
*/
NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
@protocol MPPObjectDetectorLiveStreamDelegate <NSObject>
@optional
/**
* This method notifies a delegate that the results of asynchronous object detection on
* an image submitted to the `MPPObjectDetector` are available.
*
* This method is called on a private serial dispatch queue created by the `MPPObjectDetector`
* for performing the asynchronous delegate calls.
*
* @param objectDetector The object detector which performed the object detection.
* This is useful to test equality when there are multiple instances of `MPPObjectDetector`.
* @param result The `MPPObjectDetectionResult` object that contains a list of detections, each of
* which has a bounding box expressed in the unrotated input frame of reference coordinate
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
* underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the object detector.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input live stream image data.
*
*/
- (void)objectDetector:(MPPObjectDetector *)objectDetector
didFinishDetectionWithResult:(nullable MPPObjectDetectionResult *)result
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(nullable NSError *)error
NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:));
@end
/** Options for setting up a `MPPObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorOptions)
@interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the object detector task. Defaults to `MPPRunningModeImage`.
* `MPPObjectDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing object detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing object detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing object detection on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
-* The user-defined result callback for processing live stream data. The result callback should only
-* be specified when the running mode is set to the live stream mode.
-* TODO: Add parameter `MPPImage` in the callback.
+* An object that conforms to the `MPPObjectDetectorLiveStreamDelegate` protocol. This object must
+* implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
+* the results of performing asynchronous object detection on images (i.e., when `runningMode` =
+* `MPPRunningModeLiveStream`).
*/
-@property(nonatomic, copy) void (^completion)
-(MPPObjectDetectionResult *__nullable result, NSInteger timestampMs, NSError *error);
+@property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
+objectDetectorLiveStreamDelegate;
/**
* The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults

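A hedged adoption sketch (not part of this diff; the client class name, model path, and result handling are illustrative assumptions) showing how these options and the delegate protocol fit together:

@interface MyDetectionClient : NSObject <MPPObjectDetectorLiveStreamDelegate>
@property(nonatomic, strong) MPPObjectDetector *objectDetector;
@end

@implementation MyDetectionClient

- (void)setUpDetector {
  MPPObjectDetectorOptions *options = [[MPPObjectDetectorOptions alloc] init];
  options.baseOptions.modelAssetPath = @"object_detector.tflite";  // hypothetical model path
  options.runningMode = MPPRunningModeLiveStream;
  // The delegate property is weak; the app must keep the delegate (here, `self`) alive for
  // callbacks to arrive.
  options.objectDetectorLiveStreamDelegate = self;
  self.objectDetector = [[MPPObjectDetector alloc] initWithOptions:options error:nil];
}

- (void)objectDetector:(MPPObjectDetector *)objectDetector
    didFinishDetectionWithResult:(MPPObjectDetectionResult *)result
         timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                           error:(NSError *)error {
  // Called on the detector's private serial callback queue; hop to the main queue before
  // updating UI with `result`.
}

@end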
View File

@@ -33,7 +33,7 @@
objectDetectorOptions.categoryDenylist = self.categoryDenylist;
objectDetectorOptions.categoryAllowlist = self.categoryAllowlist;
objectDetectorOptions.displayNamesLocale = self.displayNamesLocale;
-objectDetectorOptions.completion = self.completion;
+objectDetectorOptions.objectDetectorLiveStreamDelegate = self.objectDetectorLiveStreamDelegate;
return objectDetectorOptions;
}