commit c7ba201e6a
Merge pull request #4372 from priankakariatyml:ios-image-classifier-async-fixes

PiperOrigin-RevId: 531517080
Copybara-Service, 2023-05-12 08:50:14 -07:00

8 changed files with 267 additions and 47 deletions

View File

@@ -27,6 +27,8 @@ static NSDictionary *const kMultiObjectsRotatedImage =
     @{@"name" : @"multi_objects_rotated", @"type" : @"jpg"};
 static const int kMobileNetCategoriesCount = 1001;
 static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
+static NSString *const kLiveStreamTestsDictImageClassifierKey = @"image_classifier";
+static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
 #define AssertEqualErrors(error, expectedError) \
   XCTAssertNotNil(error);                       \
@@ -54,11 +56,14 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   XCTAssertEqual(imageClassifierResult.classificationResult.classifications.count, 1); \
   XCTAssertEqual(imageClassifierResult.classificationResult.classifications[0].headIndex, 0);
-@interface MPPImageClassifierTests : XCTestCase
+@interface MPPImageClassifierTests : XCTestCase <MPPImageClassifierLiveStreamDelegate> {
+  NSDictionary *liveStreamSucceedsTestDict;
+  NSDictionary *outOfOrderTimestampTestDict;
+}
 @end
 @implementation MPPImageClassifierTests
 #pragma mark Results
 + (NSArray<MPPCategory *> *)expectedResultCategoriesForClassifyBurgerImageWithFloatModel {
@@ -436,14 +441,13 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 #pragma mark Running Mode Tests
-- (void)testCreateImageClassifierFailsWithResultListenerInNonLiveStreamMode {
+- (void)testCreateImageClassifierFailsWithDelegateInNonLiveStreamMode {
   MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo};
   for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) {
     MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
     options.runningMode = runningModesToTest[i];
-    options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
-    };
+    options.imageClassifierLiveStreamDelegate = self;
     [self
         assertCreateImageClassifierWithOptions:options
@@ -459,7 +463,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   }
 }
-- (void)testCreateImageClassifierFailsWithMissingResultListenerInLiveStreamMode {
+- (void)testCreateImageClassifierFailsWithMissingDelegateInLiveStreamMode {
   MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
   options.runningMode = MPPRunningModeLiveStream;
@@ -555,9 +559,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
   options.runningMode = MPPRunningModeLiveStream;
-  options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
-  };
+  options.imageClassifierLiveStreamDelegate = self;
   MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
@@ -621,16 +623,20 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   options.maxResults = maxResults;
   options.runningMode = MPPRunningModeLiveStream;
-  options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
-    [self assertImageClassifierResult:result
-           hasExpectedCategoriesCount:maxResults
-                   expectedCategories:
-                       [MPPImageClassifierTests
-                           expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
-  };
+  options.imageClassifierLiveStreamDelegate = self;
+
+  XCTestExpectation *expectation = [[XCTestExpectation alloc]
+      initWithDescription:@"classifyWithOutOfOrderTimestampsAndLiveStream"];
+  expectation.expectedFulfillmentCount = 1;
   MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
+  outOfOrderTimestampTestDict = @{
+    kLiveStreamTestsDictImageClassifierKey : imageClassifier,
+    kLiveStreamTestsDictExpectationKey : expectation
+  };
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];
   XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:1 error:nil]);
@@ -646,6 +652,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
                        @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
                  }];
   AssertEqualErrors(error, expectedError);
+
+  NSTimeInterval timeout = 0.5f;
+  [self waitForExpectations:@[ expectation ] timeout:timeout];
 }
 - (void)testClassifyWithLiveStreamModeSucceeds {
@@ -655,24 +664,64 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   options.maxResults = maxResults;
   options.runningMode = MPPRunningModeLiveStream;
-  options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
-    [self assertImageClassifierResult:result
-           hasExpectedCategoriesCount:maxResults
-                   expectedCategories:
-                       [MPPImageClassifierTests
-                           expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
-  };
+  options.imageClassifierLiveStreamDelegate = self;
+
+  NSInteger iterationCount = 100;
+
+  // Because of flow limiting, we cannot ensure that the callback will be
+  // invoked `iterationCount` times.
+  // A normal expectation will fail if `expectation.fulfill()` is not called
+  // `expectation.expectedFulfillmentCount` times.
+  // If `expectation.inverted = YES`, the test will only succeed if the
+  // expectation is not fulfilled the specified `expectedFulfillmentCount` times.
+  // Since we cannot predict how many times the expectation will be fulfilled,
+  // setting `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and
+  // `expectation.inverted = YES` ensures that the test succeeds if the
+  // expectation is fulfilled <= `iterationCount` times.
+  XCTestExpectation *expectation =
+      [[XCTestExpectation alloc] initWithDescription:@"classifyWithLiveStream"];
+  expectation.expectedFulfillmentCount = iterationCount + 1;
+  expectation.inverted = YES;
   MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
+  liveStreamSucceedsTestDict = @{
+    kLiveStreamTestsDictImageClassifierKey : imageClassifier,
+    kLiveStreamTestsDictExpectationKey : expectation
+  };
   // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
   // with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
   // `CMSampleBuffer`.
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];
-  for (int i = 0; i < 3; i++) {
+  for (int i = 0; i < iterationCount; i++) {
     XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
+
+  NSTimeInterval timeout = 0.5f;
+  [self waitForExpectations:@[ expectation ] timeout:timeout];
+}
+
+- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
+    didFinishClassificationWithResult:(MPPImageClassifierResult *)imageClassifierResult
+              timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                error:(NSError *)error {
+  NSInteger maxResults = 3;
+  [self assertImageClassifierResult:imageClassifierResult
+         hasExpectedCategoriesCount:maxResults
+                 expectedCategories:
+                     [MPPImageClassifierTests
+                         expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
+
+  if (imageClassifier == outOfOrderTimestampTestDict[kLiveStreamTestsDictImageClassifierKey]) {
+    [outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
+  } else if (imageClassifier ==
+             liveStreamSucceedsTestDict[kLiveStreamTestsDictImageClassifierKey]) {
+    [liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
+  }
 }
 @end

View File

@@ -85,6 +85,14 @@ NS_SWIFT_NAME(ImageClassifier)
  * interest. Rotation will be applied according to the `orientation` property of the provided
  * `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
  * `MPPRunningModeImage`.
+ * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space is
+ * RGB with an Alpha channel.
  *
  * @param image The `MPPImage` on which image classification is to be performed.
  * @param error An optional error parameter populated when there is an error in performing image
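
For illustration (not part of this diff), a minimal image-mode sketch based on the documentation above; the model and image file names are hypothetical, and the initializers are the standard MediaPipe Tasks ones:

// Assumes "model.tflite" and "burger.jpg" are bundled with the app.
MPPImageClassifierOptions *options = [[MPPImageClassifierOptions alloc] init];
options.baseOptions.modelAssetPath = [[NSBundle mainBundle] pathForResource:@"model"
                                                                     ofType:@"tflite"];
options.maxResults = 3;

NSError *error = nil;
MPPImageClassifier *classifier = [[MPPImageClassifier alloc] initWithOptions:options
                                                                        error:&error];

// Per the note above, the source image should be RGB with an Alpha channel.
MPPImage *image = [[MPPImage alloc] initWithUIImage:[UIImage imageNamed:@"burger.jpg"]
                                              error:&error];
MPPImageClassifierResult *result = [classifier classifyImage:image error:&error];
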
@@ -102,6 +110,15 @@ NS_SWIFT_NAME(ImageClassifier)
  * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
  * `MPPRunningModeImage`.
  *
+ * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space is
+ * RGB with an Alpha channel.
+ *
  * @param image The `MPPImage` on which image classification is to be performed.
  * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
  * image classification should be performed.
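
A short region-of-interest sketch under the same assumptions (`classifier` and `image` from the previous example); the rectangle is illustrative, and the exact coordinate convention is whatever the `roi` parameter documents for the given `MPPImage`:

CGRect roi = CGRectMake(0.25f, 0.25f, 0.5f, 0.5f);  // Illustrative center crop.
NSError *error = nil;
MPPImageClassifierResult *result = [classifier classifyImage:image
                                             regionOfInterest:roi
                                                        error:&error];
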
@@ -121,6 +138,18 @@ NS_SWIFT_NAME(ImageClassifier)
  * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
  * `MPPRunningModeVideo`.
  *
+ * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
+ * be monotonically increasing.
+ *
+ * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space is
+ * RGB with an Alpha channel.
+ *
  * @param image The `MPPImage` on which image classification is to be performed.
  * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
  * timestamps must be monotonically increasing.
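
A hedged video-mode sketch; it assumes the classifier was created with `MPPRunningModeVideo` and that `framesOfVideo` (an `NSArray<MPPImage *>` decoded at 30 fps) was produced elsewhere:

NSInteger frameDurationInMilliseconds = 1000 / 30;  // Assumed 30 fps source.
for (NSInteger i = 0; i < (NSInteger)framesOfVideo.count; i++) {
  NSError *error = nil;
  // Timestamps derived from the frame index increase monotonically, as required.
  MPPImageClassifierResult *result =
      [classifier classifyVideoFrame:framesOfVideo[i]
             timestampInMilliseconds:i * frameDurationInMilliseconds
                               error:&error];
}
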
@@ -143,6 +172,15 @@ NS_SWIFT_NAME(ImageClassifier)
  * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
  * be monotonically increasing.
  *
+ * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space is
+ * RGB with an Alpha channel.
+ *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
  * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
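
The region-of-interest variant follows the same pattern; the selector below is assumed to mirror the other classify methods, since the full declaration is truncated in this excerpt:

// `frame`, `roi`, and `timestampInMilliseconds` as in the previous sketches.
NSError *error = nil;
MPPImageClassifierResult *result =
    [classifier classifyVideoFrame:frame
           timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:roi
                             error:&error];
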
@@ -164,12 +202,29 @@ NS_SWIFT_NAME(ImageClassifier)
  * Sends live stream image data of type `MPPImage` to perform image classification using the whole
  * image as region of interest. Rotation will be applied according to the `orientation` property of
  * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
- * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
- * provided in the `MPPImageClassifierOptions`.
+ * `MPPRunningModeLiveStream`.
+ *
+ * The object which needs to be continuously notified of the available results of image
+ * classification must conform to the `MPPImageClassifierLiveStreamDelegate` protocol and implement
+ * the `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
+ * delegate method.
  *
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
  * to the image classifier. The input timestamps must be monotonically increasing.
  *
+ * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color
+ * space is RGB with an Alpha channel.
+ *
+ * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
+ * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
+ * `videoSettings` property.
+ *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
  * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
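
A hedged `AVFoundation` sketch for the live-stream path described above. It uses `kCVPixelFormatType_32BGRA`, which is in the supported-format list (the doc comment suggests `kCMPixelFormat_32RGBA`); the `imageClassifier` and `sampleBufferQueue` properties are hypothetical, and the `MPPImage` sample-buffer initializer is assumed from the `MPPImageSourceTypeSampleBuffer` source type mentioned above:

// Hypothetical capture setup, e.g. in a view controller owning the classifier.
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings =
    @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
[videoDataOutput setSampleBufferDelegate:self queue:self.sampleBufferQueue];

// Hypothetical AVCaptureVideoDataOutputSampleBufferDelegate implementation:
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSError *error = nil;
  MPPImage *image = [[MPPImage alloc] initWithSampleBuffer:sampleBuffer error:&error];

  // Millisecond timestamps derived from the presentation time increase monotonically.
  NSInteger timestampInMilliseconds =
      (NSInteger)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1000);
  [self.imageClassifier classifyAsyncImage:image
                   timestampInMilliseconds:timestampInMilliseconds
                                     error:&error];
}
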
@@ -185,15 +240,32 @@ NS_SWIFT_NAME(ImageClassifier)
     NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
 /**
  * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
- * specified region of interest. Rotation will be applied according to the `orientation` property
+ * specified region of interest. Rotation will be applied according to the `orientation` property
  * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
- * `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
- * provided in the `MPPImageClassifierOptions`.
+ * `MPPRunningModeLiveStream`.
+ *
+ * The object which needs to be continuously notified of the available results of image
+ * classification must conform to the `MPPImageClassifierLiveStreamDelegate` protocol and implement
+ * the `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate
+ * method.
  *
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
  * to the image classifier. The input timestamps must be monotonically increasing.
  *
+ * This method supports classification of RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color
+ * space is RGB with an Alpha channel.
+ *
+ * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
+ * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
+ * `videoSettings` property.
+ *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
  * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
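
The region-of-interest live-stream variant is assumed to take the same parameters plus the ROI rectangle (the full selector is truncated in this excerpt); `image`, `roi`, and `timestampInMilliseconds` are as in the earlier sketches:

NSError *error = nil;
// Returns NO and populates `error` if the frame could not be submitted.
BOOL queued = [self.imageClassifier classifyAsyncImage:image
                               timestampInMilliseconds:timestampInMilliseconds
                                      regionOfInterest:roi
                                                 error:&error];
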

View File

@@ -27,6 +27,7 @@
 namespace {
 using ::mediapipe::NormalizedRect;
 using ::mediapipe::Packet;
+using ::mediapipe::Timestamp;
 using ::mediapipe::tasks::core::PacketMap;
 using ::mediapipe::tasks::core::PacketsCallback;
 }  // namespace
@@ -38,9 +39,11 @@ static NSString *const kImageOutStreamName = @"image_out";
 static NSString *const kImageTag = @"IMAGE";
 static NSString *const kNormRectStreamName = @"norm_rect_in";
 static NSString *const kNormRectTag = @"NORM_RECT";
 static NSString *const kTaskGraphName =
     @"mediapipe.tasks.vision.image_classifier.ImageClassifierGraph";
+static NSString *const kTaskName = @"imageClassifier";
+static const int kMicroSecondsPerMilliSecond = 1000;
 #define InputPacketMap(imagePacket, normalizedRectPacket) \
   {                                                       \
@@ -53,6 +56,8 @@ static NSString *const kTaskGraphName =
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
 }
+@property(nonatomic, weak) id<MPPImageClassifierLiveStreamDelegate>
+    imageClassifierLiveStreamDelegate;
 @end
 @implementation MPPImageClassifier
@@ -81,16 +86,58 @@ static NSString *const kTaskGraphName =
   PacketsCallback packetsCallback = nullptr;
-  if (options.completion) {
+  if (options.imageClassifierLiveStreamDelegate) {
+    _imageClassifierLiveStreamDelegate = options.imageClassifierLiveStreamDelegate;
+
+    // Capture `self` weakly to avoid keeping `self` in memory and causing a
+    // retain cycle after `self` is set to `nil`.
+    MPPImageClassifier *__weak weakSelf = self;
+
+    // Create a private serial dispatch queue on which the delegate method will be called
+    // asynchronously. This ensures that if the client performs a long-running operation in
+    // the delegate method, the queue on which the C++ callbacks are invoked is not blocked
+    // and is freed up to continue with its operations.
+    const char *queueName = [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName];
+    dispatch_queue_t callbackQueue = dispatch_queue_create(queueName, NULL);
     packetsCallback = [=](absl::StatusOr<PacketMap> status_or_packets) {
-      NSError *callbackError = nil;
-      MPPImageClassifierResult *result;
-      if ([MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
-        result = [MPPImageClassifierResult
-            imageClassifierResultWithClassificationsPacket:
-                status_or_packets.value()[kClassificationsStreamName.cppString]];
-      }
-      options.completion(result, callbackError);
+      if (!weakSelf) {
+        return;
+      }
+      if (![weakSelf.imageClassifierLiveStreamDelegate
+              respondsToSelector:@selector
+              (imageClassifier:
+                  didFinishClassificationWithResult:timestampInMilliseconds:error:)]) {
+        return;
+      }
+
+      NSError *callbackError = nil;
+      if (![MPPCommonUtils checkCppError:status_or_packets.status() toError:&callbackError]) {
+        dispatch_async(callbackQueue, ^{
+          [weakSelf.imageClassifierLiveStreamDelegate imageClassifier:weakSelf
+                                    didFinishClassificationWithResult:nil
+                                              timestampInMilliseconds:Timestamp::Unset().Value()
+                                                                error:callbackError];
+        });
+        return;
+      }
+
+      PacketMap &outputPacketMap = status_or_packets.value();
+      if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+        return;
+      }
+
+      MPPImageClassifierResult *result =
+          [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket:
+                                        outputPacketMap[kClassificationsStreamName.cppString]];
+
+      NSInteger timeStampInMilliseconds =
+          outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+          kMicroSecondsPerMilliSecond;
+      dispatch_async(callbackQueue, ^{
+        [weakSelf.imageClassifierLiveStreamDelegate imageClassifier:weakSelf
+                                  didFinishClassificationWithResult:result
+                                            timestampInMilliseconds:timeStampInMilliseconds
+                                                              error:callbackError];
+      });
     };
   }

View File

@@ -20,20 +20,67 @@
 NS_ASSUME_NONNULL_BEGIN
+@class MPPImageClassifier;
+
+/**
+ * This protocol defines an interface for the delegates of the `MPPImageClassifier` object to
+ * receive results of asynchronous classification of images (i.e., when `runningMode =
+ * MPPRunningModeLiveStream`).
+ *
+ * The delegate of `MPPImageClassifier` must adopt the `MPPImageClassifierLiveStreamDelegate`
+ * protocol. The methods in this protocol are optional.
+ */
+NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
+@protocol MPPImageClassifierLiveStreamDelegate <NSObject>
+
+@optional
+/**
+ * This method notifies the delegate that the results of asynchronous classification of
+ * an image submitted to the `MPPImageClassifier` are available.
+ *
+ * This method is called on a private serial queue created by the `MPPImageClassifier`
+ * for performing the asynchronous delegate calls.
+ *
+ * @param imageClassifier The image classifier which performed the classification.
+ * This is useful to test equality when there are multiple instances of `MPPImageClassifier`.
+ * @param result An `MPPImageClassifierResult` object that contains a list of image classifications.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image was sent to the image classifier.
+ * @param error An optional error parameter populated when there is an error in performing image
+ * classification on the input live stream image data.
+ */
+- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
+    didFinishClassificationWithResult:(nullable MPPImageClassifierResult *)result
+              timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                error:(nullable NSError *)error
+    NS_SWIFT_NAME(imageClassifier(_:didFinishClassification:timestampInMilliseconds:error:));
+@end
+
 /**
  * Options for setting up a `MPPImageClassifier`.
  */
 NS_SWIFT_NAME(ImageClassifierOptions)
 @interface MPPImageClassifierOptions : MPPTaskOptions <NSCopying>
+/**
+ * Running mode of the image classifier task. Defaults to `MPPRunningModeImage`.
+ * `MPPImageClassifier` can be created with one of the following running modes:
+ * 1. `MPPRunningModeImage`: The mode for performing classification on single image inputs.
+ * 2. `MPPRunningModeVideo`: The mode for performing classification on the decoded frames of a
+ * video.
+ * 3. `MPPRunningModeLiveStream`: The mode for performing classification on a live stream of input
+ * data, such as from the camera.
+ */
 @property(nonatomic) MPPRunningMode runningMode;
 /**
- * The user-defined result callback for processing live stream data. The result callback should only
- * be specified when the running mode is set to the live stream mode.
- * TODO: Add parameter `MPPImage` in the callback.
+ * An object that conforms to the `MPPImageClassifierLiveStreamDelegate` protocol. This object must
+ * implement `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` to
+ * receive the results of asynchronous classification on images (i.e., when `runningMode =
+ * MPPRunningModeLiveStream`).
  */
-@property(nonatomic, copy) void (^completion)(MPPImageClassifierResult *result, NSError *error);
+@property(nonatomic, weak, nullable) id<MPPImageClassifierLiveStreamDelegate>
+    imageClassifierLiveStreamDelegate;
 /**
  * The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults
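
To tie the pieces together, a hedged sketch of a client adopting this delegate; the class and method names here are hypothetical, while the protocol, property, and delegate selector come from the declarations above:

@interface MyClassifierClient : NSObject <MPPImageClassifierLiveStreamDelegate>
@property(nonatomic) MPPImageClassifier *imageClassifier;
@end

@implementation MyClassifierClient

- (void)setUpWithOptions:(MPPImageClassifierOptions *)options {
  options.runningMode = MPPRunningModeLiveStream;
  // The delegate property is weak, so this client (and the classifier it owns)
  // must be kept alive for as long as results are expected.
  options.imageClassifierLiveStreamDelegate = self;
  self.imageClassifier = [[MPPImageClassifier alloc] initWithOptions:options error:nil];
}

- (void)imageClassifier:(MPPImageClassifier *)imageClassifier
    didFinishClassificationWithResult:(MPPImageClassifierResult *)result
              timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                error:(NSError *)error {
  // Invoked on the classifier's private serial queue; dispatch to the main
  // queue before updating UI.
}

@end
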

View File

@@ -33,7 +33,7 @@
   imageClassifierOptions.categoryDenylist = self.categoryDenylist;
   imageClassifierOptions.categoryAllowlist = self.categoryAllowlist;
   imageClassifierOptions.displayNamesLocale = self.displayNamesLocale;
-  imageClassifierOptions.completion = self.completion;
+  imageClassifierOptions.imageClassifierLiveStreamDelegate = self.imageClassifierLiveStreamDelegate;
   return imageClassifierOptions;
 }

View File

@@ -28,7 +28,7 @@ NS_ASSUME_NONNULL_BEGIN
  *
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
-+ (MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket:
++ (nullable MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket:
     (const mediapipe::Packet &)packet;
 @end

View File

@@ -27,9 +27,15 @@ using ::mediapipe::Packet;
 @implementation MPPImageClassifierResult (Helpers)
-+ (MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket:
++ (nullable MPPImageClassifierResult *)imageClassifierResultWithClassificationsPacket:
     (const Packet &)packet {
-  MPPClassificationResult *classificationResult = [MPPClassificationResult
+  MPPClassificationResult *classificationResult;
+
+  if (!packet.ValidateAsType<ClassificationResultProto>().ok()) {
+    return nil;
+  }
+
+  classificationResult = [MPPClassificationResult
       classificationResultWithProto:packet.Get<ClassificationResultProto>()];
 
   return [[MPPImageClassifierResult alloc]

View File

@@ -52,7 +52,6 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
  * image was sent to the object detector.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input live stream image data.
- *
  */
 - (void)objectDetector:(MPPObjectDetector *)objectDetector
     didFinishDetectionWithResult:(nullable MPPObjectDetectionResult *)result