Updated iOS object detector to use delegates instead of callbacks for async calls

This commit is contained in:
Prianka Liz Kariat 2023-05-04 17:03:40 +05:30
parent a21c08bf4d
commit ab135190e5
5 changed files with 148 additions and 55 deletions

View File

@ -25,6 +25,8 @@ static NSDictionary *const kCatsAndDogsRotatedImage =
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
static const float pixelDifferenceTolerance = 10.0f; static const float pixelDifferenceTolerance = 10.0f;
static const float scoreDifferenceTolerance = 0.02f; static const float scoreDifferenceTolerance = 0.02f;
static NSString *const kLiveStreamTestsDictObjectDetectorKey = @"object_detector";
static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
#define AssertEqualErrors(error, expectedError) \ #define AssertEqualErrors(error, expectedError) \
XCTAssertNotNil(error); \ XCTAssertNotNil(error); \
@ -58,7 +60,10 @@ static const float scoreDifferenceTolerance = 0.02f;
XCTAssertEqualWithAccuracy(boundingBox.size.height, expectedBoundingBox.size.height, \ XCTAssertEqualWithAccuracy(boundingBox.size.height, expectedBoundingBox.size.height, \
pixelDifferenceTolerance, @"index i = %d", idx); pixelDifferenceTolerance, @"index i = %d", idx);
@interface MPPObjectDetectorTests : XCTestCase @interface MPPObjectDetectorTests : XCTestCase <MPPObjectDetectorLiveStreamDelegate> {
NSDictionary *liveStreamSucceedsTestDict;
NSDictionary *outOfOrderTimestampTestDict;
}
@end @end
@implementation MPPObjectDetectorTests @implementation MPPObjectDetectorTests
@ -446,31 +451,28 @@ static const float scoreDifferenceTolerance = 0.02f;
#pragma mark Running Mode Tests #pragma mark Running Mode Tests
- (void)testCreateObjectDetectorFailsWithResultListenerInNonLiveStreamMode { - (void)testCreateObjectDetectorFailsWithDelegateInNonLiveStreamMode {
MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo};
for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) {
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName]; MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = runningModesToTest[i]; options.runningMode = runningModesToTest[i];
options.completion = options.objectDetectorLiveStreamDelegate = self;
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
};
[self [self
assertCreateObjectDetectorWithOptions:options assertCreateObjectDetectorWithOptions:options
failsWithExpectedError: failsWithExpectedError:
[NSError [NSError errorWithDomain:kExpectedErrorDomain
errorWithDomain:kExpectedErrorDomain code:MPPTasksErrorCodeInvalidArgumentError
code:MPPTasksErrorCodeInvalidArgumentError userInfo:@{
userInfo:@{ NSLocalizedDescriptionKey :
NSLocalizedDescriptionKey : @"The vision task is in image or video mode. The "
@"The vision task is in image or video mode, a " @"delegate must not be set in the task's options."
@"user-defined result callback should not be provided." }]];
}]];
} }
} }
- (void)testCreateObjectDetectorFailsWithMissingResultListenerInLiveStreamMode { - (void)testCreateObjectDetectorFailsWithMissingDelegateInLiveStreamMode {
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName]; MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = MPPRunningModeLiveStream; options.runningMode = MPPRunningModeLiveStream;
@ -481,8 +483,11 @@ static const float scoreDifferenceTolerance = 0.02f;
code:MPPTasksErrorCodeInvalidArgumentError code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{ userInfo:@{
NSLocalizedDescriptionKey : NSLocalizedDescriptionKey :
@"The vision task is in live stream mode, a " @"The vision task is in live stream mode. An "
@"user-defined result callback must be provided." @"object must be set as the "
@"delegate of the task in its options to ensure "
@"asynchronous delivery of "
@"results."
}]]; }]];
} }
@ -563,10 +568,7 @@ static const float scoreDifferenceTolerance = 0.02f;
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName]; MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = MPPRunningModeLiveStream; options.runningMode = MPPRunningModeLiveStream;
options.completion = options.objectDetectorLiveStreamDelegate = self;
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
};
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options]; MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
@ -631,23 +633,17 @@ static const float scoreDifferenceTolerance = 0.02f;
options.maxResults = maxResults; options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream; options.runningMode = MPPRunningModeLiveStream;
options.objectDetectorLiveStreamDelegate = self;
XCTestExpectation *expectation = [[XCTestExpectation alloc] XCTestExpectation *expectation = [[XCTestExpectation alloc]
initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"];
expectation.expectedFulfillmentCount = 1; expectation.expectedFulfillmentCount = 1;
options.completion =
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
[self assertObjectDetectionResult:result
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
[expectation fulfill];
};
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options]; MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
liveStreamSucceedsTestDict = @{
kLiveStreamTestsDictObjectDetectorKey : objectDetector,
kLiveStreamTestsDictExpectationKey : expectation
};
MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage]; MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
@ -693,19 +689,15 @@ static const float scoreDifferenceTolerance = 0.02f;
expectation.expectedFulfillmentCount = iterationCount + 1; expectation.expectedFulfillmentCount = iterationCount + 1;
expectation.inverted = YES; expectation.inverted = YES;
options.completion = options.objectDetectorLiveStreamDelegate = self;
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
[self assertObjectDetectionResult:result
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
[expectation fulfill];
};
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options]; MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
liveStreamSucceedsTestDict = @{
kLiveStreamTestsDictObjectDetectorKey : objectDetector,
kLiveStreamTestsDictExpectationKey : expectation
};
// TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
// with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type // with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
// `CMSampleBuffer`. // `CMSampleBuffer`.
@ -718,4 +710,24 @@ static const float scoreDifferenceTolerance = 0.02f;
[self waitForExpectations:@[ expectation ] timeout:0.5]; [self waitForExpectations:@[ expectation ] timeout:0.5];
} }
#pragma mark MPPObjectDetectorLiveStreamDelegate Methods
// MPPObjectDetectorLiveStreamDelegate callback. Invoked asynchronously (on the
// detector's private callback queue — see the packetsCallback in MPPObjectDetector)
// each time a live-stream detection result is available.
- (void)objectDetector:(MPPObjectDetector *)objectDetector
didFinishDetectionWithResult:(MPPObjectDetectionResult *)objectDetectionResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError *)error {
// All live-stream tests in this file configure the detector with maxResults = 4,
// so the expected result is validated against that count here.
NSInteger maxResults = 4;
[self assertObjectDetectionResult:objectDetectionResult
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
// Fulfill the expectation of whichever test owns this detector instance.
// Pointer equality against the stored detector disambiguates between the
// out-of-order-timestamp test and the live-stream-succeeds test.
// NOTE(review): assumes each test stores its detector in the dict before any
// result is delivered on the callback queue — confirm there is no race.
if (objectDetector == outOfOrderTimestampTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
[outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
} else if (objectDetector == liveStreamSucceedsTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
[liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
}
}
@end @end

View File

@ -137,6 +137,9 @@ NS_SWIFT_NAME(ObjectDetector)
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPObjectDetectorOptions`. * provided in the `MPPObjectDetectorOptions`.
* The object which needs to be continuously notified of the available results of object
* detection must conform to `MPPObjectDetectorLiveStreamDelegate` protocol and implement the
* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing. * to the object detector. The input timestamps must be monotonically increasing.

View File

@ -37,8 +37,8 @@ static NSString *const kImageOutStreamName = @"image_out";
static NSString *const kImageTag = @"IMAGE"; static NSString *const kImageTag = @"IMAGE";
static NSString *const kNormRectStreamName = @"norm_rect_in"; static NSString *const kNormRectStreamName = @"norm_rect_in";
static NSString *const kNormRectTag = @"NORM_RECT"; static NSString *const kNormRectTag = @"NORM_RECT";
static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorGraph"; static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorGraph";
static NSString *const kTaskName = @"objectDetector";
#define InputPacketMap(imagePacket, normalizedRectPacket) \ #define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \ { \
@ -51,6 +51,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
/** iOS Vision Task Runner */ /** iOS Vision Task Runner */
MPPVisionTaskRunner *_visionTaskRunner; MPPVisionTaskRunner *_visionTaskRunner;
} }
@property(nonatomic, weak) id<MPPObjectDetectorLiveStreamDelegate> objectDetectorLiveStreamDelegate;
@end @end
@implementation MPPObjectDetector @implementation MPPObjectDetector
@ -78,11 +79,32 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
PacketsCallback packetsCallback = nullptr; PacketsCallback packetsCallback = nullptr;
if (options.completion) { if (options.objectDetectorLiveStreamDelegate) {
_objectDetectorLiveStreamDelegate = options.objectDetectorLiveStreamDelegate;
// Capturing `self` as weak in order to avoid `self` being kept in memory
// and cause a retain cycle, after self is set to `nil`.
MPPObjectDetector *__weak weakSelf = self;
dispatch_queue_t callbackQueue =
dispatch_queue_create([MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL);
packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) { packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
if (!weakSelf) {
return;
}
if (![weakSelf.objectDetectorLiveStreamDelegate
respondsToSelector:@selector
(objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
return;
}
NSError *callbackError = nil; NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) { if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
options.completion(nil, Timestamp::Unset().Value(), callbackError); dispatch_async(callbackQueue, ^{
[weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
didFinishDetectionWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
});
return; return;
} }
@ -95,10 +117,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName
.cppString]]; .cppString]];
options.completion(result, NSInteger timeStampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicroSecondsPerMilliSecond, kMicroSecondsPerMilliSecond;
callbackError); dispatch_async(callbackQueue, ^{
[weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
didFinishDetectionWithResult:result
timestampInMilliseconds:timeStampInMilliseconds
error:callbackError];
});
}; };
} }
@ -112,6 +139,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return nil; return nil;
} }
} }
return self; return self;
} }
@ -224,5 +252,4 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error]; return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
} }
@end @end

View File

@ -20,19 +20,70 @@
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
@class MPPObjectDetector;
/**
* This protocol defines an interface for the delegates of `MPPObjectDetector` object to receive
* results of performing asynchronous object detection on images
* (i.e, when `runningMode` = `MPPRunningModeLiveStream`).
*
* The delegate of `MPPObjectDetector` must adopt `MPPObjectDetectorLiveStreamDelegate` protocol.
* The methods in this protocol are optional.
*/
NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
@protocol MPPObjectDetectorLiveStreamDelegate <NSObject>
@optional
/**
* This method notifies a delegate that the results of asynchronous object detection of
* an image submitted to the `MPPObjectDetector` are available.
*
* This method is called on a private serial dispatch queue created by the `MPPObjectDetector`
* for performing the asynchronous delegates calls.
*
* @param objectDetector The object detector which performed the object detection.
* This is useful to test equality when there are multiple instances of `MPPObjectDetector`.
* @param result The `MPPObjectDetectionResult` object that contains a list of detections, each
* detection has a bounding box that is expressed in the unrotated input frame of reference
* coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
* underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the object detector.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input live stream image data.
*
*/
- (void)objectDetector:(MPPObjectDetector *)objectDetector
didFinishDetectionWithResult:(nullable MPPObjectDetectionResult *)result
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(nullable NSError *)error
NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:));
@end
/** Options for setting up a `MPPObjectDetector`. */ /** Options for setting up a `MPPObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorOptions) NS_SWIFT_NAME(ObjectDetectorOptions)
@interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying> @interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the object detector task. Defaults to `MPPRunningModeImage`.
* `MPPObjectDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing object detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing object detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing object detection on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode; @property(nonatomic) MPPRunningMode runningMode;
/** /**
* The user-defined result callback for processing live stream data. The result callback should only * An object that conforms to `MPPObjectDetectorLiveStreamDelegate` protocol. This object must
* be specified when the running mode is set to the live stream mode. * implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
* TODO: Add parameter `MPPImage` in the callback. * the results of performing asynchronous object detection on images (i.e, when `runningMode` =
* `MPPRunningModeLiveStream`).
*/ */
@property(nonatomic, copy) void (^completion) @property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
(MPPObjectDetectionResult *__nullable result, NSInteger timestampMs, NSError *error); objectDetectorLiveStreamDelegate;
/** /**
* The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults * The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults

View File

@ -33,7 +33,7 @@
objectDetectorOptions.categoryDenylist = self.categoryDenylist; objectDetectorOptions.categoryDenylist = self.categoryDenylist;
objectDetectorOptions.categoryAllowlist = self.categoryAllowlist; objectDetectorOptions.categoryAllowlist = self.categoryAllowlist;
objectDetectorOptions.displayNamesLocale = self.displayNamesLocale; objectDetectorOptions.displayNamesLocale = self.displayNamesLocale;
objectDetectorOptions.completion = self.completion; objectDetectorOptions.objectDetectorLiveStreamDelegate = self.objectDetectorLiveStreamDelegate;
return objectDetectorOptions; return objectDetectorOptions;
} }