diff --git a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m
index fd9466b7d..9e1c38722 100644
--- a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m
+++ b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m
@@ -25,6 +25,8 @@ static NSDictionary *const kCatsAndDogsRotatedImage =
 static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 static const float pixelDifferenceTolerance = 10.0f;
 static const float scoreDifferenceTolerance = 0.02f;
+static NSString *const kLiveStreamTestsDictObjectDetectorKey = @"object_detector";
+static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
 
 #define AssertEqualErrors(error, expectedError) \
   XCTAssertNotNil(error);                       \
@@ -58,7 +60,10 @@ static const float scoreDifferenceTolerance = 0.02f;
   XCTAssertEqualWithAccuracy(boundingBox.size.height, expectedBoundingBox.size.height,          \
                              pixelDifferenceTolerance, @"index i = %d", idx);
 
-@interface MPPObjectDetectorTests : XCTestCase
+@interface MPPObjectDetectorTests : XCTestCase <MPPObjectDetectorDelegate> {
+  NSDictionary *liveStreamSucceedsTestDict;
+  NSDictionary *outOfOrderTimestampTestDict;
+}
 @end
 
 @implementation MPPObjectDetectorTests
@@ -452,21 +457,18 @@ static const float scoreDifferenceTolerance = 0.02f;
     MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
     options.runningMode = runningModesToTest[i];
-    options.completion =
-        ^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-        };
+    options.objectDetectorDelegate = self;
 
-    [self
-        assertCreateObjectDetectorWithOptions:options
-                       failsWithExpectedError:
-                           [NSError
-                               errorWithDomain:kExpectedErrorDomain
-                                          code:MPPTasksErrorCodeInvalidArgumentError
-                                      userInfo:@{
-                                        NSLocalizedDescriptionKey :
-                                            @"The vision task is in image or video mode, a "
-                                            @"user-defined result callback should not be provided."
-                                      }]];
+    [self assertCreateObjectDetectorWithOptions:options
+                         failsWithExpectedError:
+                             [NSError errorWithDomain:kExpectedErrorDomain
+                                                 code:MPPTasksErrorCodeInvalidArgumentError
+                                             userInfo:@{
+                                               NSLocalizedDescriptionKey :
+                                                   @"The vision task is in image or video mode. "
+                                                   @"The delegate must not be "
+                                                   @"set in the task's options."
+                                             }]];
   }
 }
 
@@ -481,8 +483,10 @@ static const float scoreDifferenceTolerance = 0.02f;
                                          code:MPPTasksErrorCodeInvalidArgumentError
                                      userInfo:@{
                                        NSLocalizedDescriptionKey :
-                                           @"The vision task is in live stream mode, a "
-                                           @"user-defined result callback must be provided."
+                                           @"The vision task is in live stream mode. An "
+                                           @"object must be set as the delegate of "
+                                           @"the task in its options to ensure "
+                                           @"asynchronous delivery of results."
                                      }]];
 }
 
@@ -636,18 +640,13 @@ static const float scoreDifferenceTolerance = 0.02f;
       initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"];
   expectation.expectedFulfillmentCount = 1;
 
-  options.completion =
-      ^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-        [self assertObjectDetectionResult:result
-            isEqualToExpectedResult:
-                [MPPObjectDetectorTests
-                    expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
-                        timestampInMilliseconds]
-            expectedDetectionsCount:maxResults];
-        [expectation fulfill];
-      };
-
+  options.objectDetectorDelegate = self;
+
   MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
+  outOfOrderTimestampTestDict = @{
+    kLiveStreamTestsDictObjectDetectorKey : objectDetector,
+    kLiveStreamTestsDictExpectationKey : expectation
+  };
 
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
@@ -693,19 +690,15 @@ static const float scoreDifferenceTolerance = 0.02f;
   expectation.expectedFulfillmentCount = iterationCount + 1;
   expectation.inverted = YES;
 
-  options.completion =
-      ^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
-        [self assertObjectDetectionResult:result
-            isEqualToExpectedResult:
-                [MPPObjectDetectorTests
-                    expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
-                        timestampInMilliseconds]
-            expectedDetectionsCount:maxResults];
-        [expectation fulfill];
-      };
+  options.objectDetectorDelegate = self;
 
   MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
+  liveStreamSucceedsTestDict = @{
+    kLiveStreamTestsDictObjectDetectorKey : objectDetector,
+    kLiveStreamTestsDictExpectationKey : expectation
+  };
+
   // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
   // with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
   // `CMSampleBuffer`.
@@ -718,4 +711,25 @@
   [self waitForExpectations:@[ expectation ] timeout:0.5];
 }
 
+- (void)objectDetector:(MPPObjectDetector *)objectDetector
+    didFinishObjectDetectionWithResult:(MPPObjectDetectionResult *)objectDetectionResult
+               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                 error:(NSError *)error {
+  NSInteger maxResults = 4;
+  [self assertObjectDetectionResult:objectDetectionResult
+      isEqualToExpectedResult:
+          [MPPObjectDetectorTests
+              expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
+                  timestampInMilliseconds]
+      expectedDetectionsCount:maxResults];
+
+  // Fulfill the expectation of whichever live stream test created the detector
+  // that delivered this result.
+  if (objectDetector == outOfOrderTimestampTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
+    [outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
+  } else if (objectDetector == liveStreamSucceedsTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
+    [liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
+  }
+}
+
 @end