Changed iOS image classifier async calls to use a delegate

Prianka Liz Kariat 2023-05-02 07:28:13 +05:30
parent 0a8be0d09d
commit 86269722ba
5 changed files with 141 additions and 42 deletions

View File

@@ -27,6 +27,8 @@ static NSDictionary *const kMultiObjectsRotatedImage =
@{@"name" : @"multi_objects_rotated", @"type" : @"jpg"};
static const int kMobileNetCategoriesCount = 1001;
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
static NSString *const kLiveStreamTestsDictImageClassifierKey = @"image_classifier";
static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
#define AssertEqualErrors(error, expectedError) \
XCTAssertNotNil(error); \
@@ -54,11 +56,15 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
XCTAssertEqual(imageClassifierResult.classificationResult.classifications.count, 1); \
XCTAssertEqual(imageClassifierResult.classificationResult.classifications[0].headIndex, 0);
@interface MPPImageClassifierTests : XCTestCase
@interface MPPImageClassifierTests : XCTestCase <MPPImageClassifierDelegate> {
NSDictionary *liveStreamSucceedsTestDict;
NSDictionary *outOfOrderTimestampTestDict;
}
@end
@implementation MPPImageClassifierTests
#pragma mark Results
+ (NSArray<MPPCategory *> *)expectedResultCategoriesForClassifyBurgerImageWithFloatModel {
@@ -442,8 +448,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = runningModesToTest[i];
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
};
options.imageClassifierDelegate = self;
[self
assertCreateImageClassifierWithOptions:options
@@ -453,8 +458,8 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in image or video mode, a "
@"user-defined result callback should not be provided."
@"The vision task is in image or video mode. The delegate must not be"
@"set in the task's options."
}]];
}
}
@@ -470,8 +475,8 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in live stream mode, a "
@"user-defined result callback must be provided."
@"The vision task is in live stream mode. An object must be set as the delegate of"
@"the task in the its options to ensure asynchronous delivery of results."
}]];
}
@@ -553,9 +558,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
};
options.imageClassifierDelegate = self;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
@@ -619,15 +622,16 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
[self assertImageClassifierResult:result
hasExpectedCategoriesCount:maxResults
expectedCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
};
options.imageClassifierDelegate = self;
XCTestExpectation *expectation = [[XCTestExpectation alloc]
initWithDescription:@"classifyWithOutOfOrderTimestampsAndLiveStream"];
expectation.expectedFulfillmentCount = 1;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
outOfOrderTimestampTestDict = @{
  kLiveStreamTestsDictImageClassifierKey : imageClassifier,
  kLiveStreamTestsDictExpectationKey : expectation
};
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
@@ -644,6 +648,8 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
@"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
}];
AssertEqualErrors(error, expectedError);
[self waitForExpectations:@[expectation] timeout:1e-2f];
}
- (void)testClassifyWithLiveStreamModeSucceeds {
@@ -653,24 +659,61 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
[self assertImageClassifierResult:result
hasExpectedCategoriesCount:maxResults
expectedCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
};
options.imageClassifierDelegate = self;
NSInteger iterationCount = 100;
// Because of flow limiting, the delegate callback may not be invoked for
// all `iterationCount` frames.
// A normal expectation fails if `expectation.fulfill()` is not called
// `expectation.expectedFulfillmentCount` times.
// If `expectation.isInverted = true`, the test succeeds only if the
// expectation is fulfilled fewer than `expectedFulfillmentCount` times.
// Since we cannot predict exactly how many times the expectation will be
// fulfilled, setting `expectation.expectedFulfillmentCount = iterationCount + 1`
// and `expectation.isInverted = true` ensures that the test succeeds if the
// expectation is fulfilled at most `iterationCount` times.
XCTestExpectation *expectation =
[[XCTestExpectation alloc] initWithDescription:@"classifyWithLiveStream"];
expectation.expectedFulfillmentCount = iterationCount + 1;
expectation.inverted = YES;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
liveStreamSucceedsTestDict = @{
  kLiveStreamTestsDictImageClassifierKey : imageClassifier,
  kLiveStreamTestsDictExpectationKey : expectation
};
// TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
// with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
// `CMSampleBuffer`.
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
for (int i = 0; i < 3; i++) {
for (int i = 0; i < iterationCount; i++) {
XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]);
}
[self waitForExpectations:@[expectation] timeout:5];
}
- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
didFinishClassificationWithResult:(MPPImageClassifierResult *)imageClassifierResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError *)error {
NSInteger maxResults = 3;
[self assertImageClassifierResult:imageClassifierResult
hasExpectedCategoriesCount:maxResults
expectedCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
if (imageClassifier == outOfOrderTimestampTestDict[kLiveStreamTestsDictImageClassifierKey]) {
[outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
} else if (imageClassifier == liveStreamSucceedsTestDict[kLiveStreamTestsDictImageClassifierKey]) {
[liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
}
}
@end

View File

@@ -164,8 +164,11 @@ NS_SWIFT_NAME(ImageClassifier)
* Sends live stream image data of type `MPPImage` to perform image classification using the whole
* image as the region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPImageClassifierOptions`.
* `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image
* classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
* delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing.
@@ -188,8 +191,10 @@ NS_SWIFT_NAME(ImageClassifier)
* Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPImageClassifierOptions`.
* `MPPRunningModeLiveStream`.
* The object which needs to be continuously notified of the available results of image
* classification must conform to the `MPPImageClassifierDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing.
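For illustration, a minimal sketch of a live stream call site follows. It assumes an `imageClassifier` already created in `MPPRunningModeLiveStream` with its delegate set; `currentFrameImage` and `frameTimestampInMilliseconds` are hypothetical inputs supplied by the capture pipeline, and only `classifyAsyncImage:timestampInMilliseconds:error:` is taken from this change:

// Hypothetical call site inside a camera capture callback.
NSError *classifyError = nil;
BOOL queued = [imageClassifier classifyAsyncImage:currentFrameImage
                          timestampInMilliseconds:frameTimestampInMilliseconds
                                            error:&classifyError];
if (!queued) {
  // The frame was rejected synchronously, e.g., because its timestamp was
  // not monotonically increasing.
  NSLog(@"classifyAsyncImage failed: %@", classifyError);
}

Successful results arrive later on the delegate method; because of flow limiting, some frames may produce no callback at all.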

View File

@@ -27,6 +27,7 @@
namespace {
using ::mediapipe::NormalizedRect;
using ::mediapipe::Packet;
using ::mediapipe::Timestamp;
using ::mediapipe::tasks::core::PacketMap;
using ::mediapipe::tasks::core::PacketsCallback;
} // namespace
@@ -53,6 +54,7 @@ static NSString *const kTaskGraphName =
/** iOS Vision Task Runner */
MPPVisionTaskRunner *_visionTaskRunner;
}
@property(nonatomic, weak) id<MPPImageClassifierDelegate> imageClassifierDelegate;
@end
@implementation MPPImageClassifier
@@ -81,16 +83,34 @@ static NSString *const kTaskGraphName =
PacketsCallback packetsCallback = nullptr;
if (options.completion) {
if (options.imageClassifierDelegate) {
_imageClassifierDelegate = options.imageClassifierDelegate;
packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) {
NSError *callbackError = nil;
MPPImageClassifierResult *result;
if ([MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
result = [MPPImageClassifierResult
imageClassifierResultWithClassificationsPacket:
status_or_packets.value()[kClassificationsStreamName.cppString]];
if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
return;
}
options.completion(result, callbackError);
PacketMap &outputPacketMap = status_or_packets.value();
if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
return;
}
MPPImageClassifierResult *result =
[MPPImageClassifierResult imageClassifierResultWithClassificationsPacket:
outputPacketMap[kClassificationsStreamName.cppString]];
[_imageClassifierDelegate imageClassifier:self
didFinishClassificationWithResult:result
timestampInMilliseconds:outputPacketMap[kImageOutStreamName.cppString]
.Timestamp()
.Value() /
kMicroSecondsPerMilliSecond
error:callbackError];
};
}

View File

@@ -20,20 +20,51 @@
NS_ASSUME_NONNULL_BEGIN
@class MPPImageClassifier;
/**
* This protocol defines an interface for the delegates of an `MPPImageClassifier` object to
* receive results of asynchronous classification of images
* (i.e., when `runningMode = MPPRunningModeLiveStream`).
*
* The delegate of `MPPImageClassifier` must adopt the `MPPImageClassifierDelegate` protocol.
* The method in this protocol is required.
* TODO: Add parameter `MPPImage` in the callback.
*/
NS_SWIFT_NAME(ImageClassifierDelegate)
@protocol MPPImageClassifierDelegate <NSObject>
@required
- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
didFinishClassificationWithResult:(nullable MPPImageClassifierResult *)result
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(nullable NSError *)error
NS_SWIFT_NAME(imageClassifier(_:didFinishClassification:timestampInMilliseconds:error:));
@end
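As a rough conformance sketch (the class name is hypothetical; the selector is the one declared above, and the MediaPipe Tasks headers are assumed to be imported):

@interface MPPMyClassificationObserver : NSObject <MPPImageClassifierDelegate>
@end

@implementation MPPMyClassificationObserver

- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
    didFinishClassificationWithResult:(MPPImageClassifierResult *)result
              timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                error:(NSError *)error {
  if (error) {
    // Classification failed for the frame at this timestamp.
    return;
  }
  // Consume `result.classificationResult` for the frame submitted at
  // `timestampInMilliseconds`.
}

@end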
/**
* Options for setting up a `MPPImageClassifier`.
*/
NS_SWIFT_NAME(ImageClassifierOptions)
@interface MPPImageClassifierOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the image classifier task. Defaults to `MPPRunningModeImage`.
* `MPPImageClassifier` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing classification on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing classification on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing classification on a live stream of input
* data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* The user-defined result callback for processing live stream data. The result callback should only
* be specified when the running mode is set to the live stream mode.
* TODO: Add parameter `MPPImage` in the callback.
* An object that conforms to the `MPPImageClassifierDelegate` protocol. This object must implement
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
* to receive the results of asynchronous classification of images (i.e., when `runningMode =
* MPPRunningModeLiveStream`).
*/
@property(nonatomic, copy) void (^completion)(MPPImageClassifierResult *result, NSError *error);
@property(nonatomic, weak) id<MPPImageClassifierDelegate> imageClassifierDelegate;
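A minimal configuration sketch under the new API (assuming `self` conforms to `MPPImageClassifierDelegate`; `maxResults` is used the same way in the tests above):

MPPImageClassifierOptions *options = [[MPPImageClassifierOptions alloc] init];
options.runningMode = MPPRunningModeLiveStream;
options.maxResults = 3;
options.imageClassifierDelegate = self;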
/**
* The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults

View File

@@ -33,7 +33,7 @@
imageClassifierOptions.categoryDenylist = self.categoryDenylist;
imageClassifierOptions.categoryAllowlist = self.categoryAllowlist;
imageClassifierOptions.displayNamesLocale = self.displayNamesLocale;
imageClassifierOptions.completion = self.completion;
imageClassifierOptions.imageClassifierDelegate = self.imageClassifierDelegate;
return imageClassifierOptions;
}