Merge pull request #4373 from priankakariatyml:ios-object-detector-async-fixes

PiperOrigin-RevId: 530233608
This commit is contained in:
Copybara-Service 2023-05-08 01:14:01 -07:00
commit 1360977730
13 changed files with 242 additions and 71 deletions

View File

@ -24,6 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
+ (NSString *)stringWithCppString:(std::string)text;
+ (NSString *)uuidString;
@end
NS_ASSUME_NONNULL_END

View File

@ -24,4 +24,8 @@
return [NSString stringWithCString:text.c_str() encoding:[NSString defaultCStringEncoding]];
}
+ (NSString *)uuidString {
return [[NSUUID UUID] UUIDString];
}
@end

View File

@ -28,7 +28,12 @@
return self;
}
// TODO: Implement hash
- (NSUInteger)hash {
NSUInteger nonNullPropertiesHash =
@(self.location.x).hash ^ @(self.location.y).hash ^ @(self.score).hash;
return self.label ? nonNullPropertiesHash ^ self.label.hash : nonNullPropertiesHash;
}
- (BOOL)isEqual:(nullable id)object {
if (!object) {

View File

@ -453,8 +453,8 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in image or video mode, a "
@"user-defined result callback should not be provided."
@"The vision task is in image or video mode. The "
@"delegate must not be set in the task's options."
}]];
}
}
@ -470,8 +470,10 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in live stream mode, a "
@"user-defined result callback must be provided."
@"The vision task is in live stream mode. An "
@"object must be set as the delegate of the task "
@"in its options to ensure asynchronous delivery "
@"of results."
}]];
}

View File

@ -25,6 +25,8 @@ static NSDictionary *const kCatsAndDogsRotatedImage =
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
static const float pixelDifferenceTolerance = 10.0f;
static const float scoreDifferenceTolerance = 0.02f;
static NSString *const kLiveStreamTestsDictObjectDetectorKey = @"object_detector";
static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
#define AssertEqualErrors(error, expectedError) \
XCTAssertNotNil(error); \
@ -58,7 +60,10 @@ static const float scoreDifferenceTolerance = 0.02f;
XCTAssertEqualWithAccuracy(boundingBox.size.height, expectedBoundingBox.size.height, \
pixelDifferenceTolerance, @"index i = %d", idx);
@interface MPPObjectDetectorTests : XCTestCase
@interface MPPObjectDetectorTests : XCTestCase <MPPObjectDetectorLiveStreamDelegate> {
NSDictionary *liveStreamSucceedsTestDict;
NSDictionary *outOfOrderTimestampTestDict;
}
@end
@implementation MPPObjectDetectorTests
@ -446,31 +451,28 @@ static const float scoreDifferenceTolerance = 0.02f;
#pragma mark Running Mode Tests
- (void)testCreateObjectDetectorFailsWithResultListenerInNonLiveStreamMode {
- (void)testCreateObjectDetectorFailsWithDelegateInNonLiveStreamMode {
MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo};
for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) {
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = runningModesToTest[i];
options.completion =
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
};
options.objectDetectorLiveStreamDelegate = self;
[self
assertCreateObjectDetectorWithOptions:options
failsWithExpectedError:
[NSError
errorWithDomain:kExpectedErrorDomain
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in image or video mode, a "
@"user-defined result callback should not be provided."
@"The vision task is in image or video mode. The "
@"delegate must not be set in the task's options."
}]];
}
}
- (void)testCreateObjectDetectorFailsWithMissingResultListenerInLiveStreamMode {
- (void)testCreateObjectDetectorFailsWithMissingDelegateInLiveStreamMode {
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = MPPRunningModeLiveStream;
@ -481,8 +483,10 @@ static const float scoreDifferenceTolerance = 0.02f;
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in live stream mode, a "
@"user-defined result callback must be provided."
@"The vision task is in live stream mode. An "
@"object must be set as the delegate of the task "
@"in its options to ensure asynchronous delivery "
@"of results."
}]];
}
@ -563,10 +567,7 @@ static const float scoreDifferenceTolerance = 0.02f;
MPPObjectDetectorOptions *options = [self objectDetectorOptionsWithModelName:kModelName];
options.runningMode = MPPRunningModeLiveStream;
options.completion =
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
};
options.objectDetectorLiveStreamDelegate = self;
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
@ -631,23 +632,17 @@ static const float scoreDifferenceTolerance = 0.02f;
options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream;
options.objectDetectorLiveStreamDelegate = self;
XCTestExpectation *expectation = [[XCTestExpectation alloc]
initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"];
expectation.expectedFulfillmentCount = 1;
options.completion =
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
[self assertObjectDetectionResult:result
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
[expectation fulfill];
};
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
outOfOrderTimestampTestDict = @{
kLiveStreamTestsDictObjectDetectorKey : objectDetector,
kLiveStreamTestsDictExpectationKey : expectation
};
MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
@ -664,7 +659,9 @@ static const float scoreDifferenceTolerance = 0.02f;
@"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
}];
AssertEqualErrors(error, expectedError);
[self waitForExpectations:@[ expectation ] timeout:1.0];
NSTimeInterval timeout = 0.5f;
[self waitForExpectations:@[ expectation ] timeout:timeout];
}
- (void)testDetectWithLiveStreamModeSucceeds {
@ -693,19 +690,15 @@ static const float scoreDifferenceTolerance = 0.02f;
expectation.expectedFulfillmentCount = iterationCount + 1;
expectation.inverted = YES;
options.completion =
^(MPPObjectDetectionResult *result, NSInteger timestampInMilliseconds, NSError *error) {
[self assertObjectDetectionResult:result
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
[expectation fulfill];
};
options.objectDetectorLiveStreamDelegate = self;
MPPObjectDetector *objectDetector = [self objectDetectorWithOptionsSucceeds:options];
liveStreamSucceedsTestDict = @{
kLiveStreamTestsDictObjectDetectorKey : objectDetector,
kLiveStreamTestsDictExpectationKey : expectation
};
// TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
// with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
// `CMSampleBuffer`.
@ -715,7 +708,28 @@ static const float scoreDifferenceTolerance = 0.02f;
XCTAssertTrue([objectDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
}
[self waitForExpectations:@[ expectation ] timeout:0.5];
NSTimeInterval timeout = 0.5f;
[self waitForExpectations:@[ expectation ] timeout:timeout];
}
#pragma mark MPPObjectDetectorLiveStreamDelegate Methods
- (void)objectDetector:(MPPObjectDetector *)objectDetector
didFinishDetectionWithResult:(MPPObjectDetectionResult *)objectDetectionResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError *)error {
NSInteger maxResults = 4;
[self assertObjectDetectionResult:objectDetectionResult
isEqualToExpectedResult:
[MPPObjectDetectorTests
expectedDetectionResultForCatsAndDogsImageWithTimestampInMilliseconds:
timestampInMilliseconds]
expectedDetectionsCount:maxResults];
if (objectDetector == outOfOrderTimestampTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
[outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
} else if (objectDetector == liveStreamSucceedsTestDict[kLiveStreamTestsDictObjectDetectorKey]) {
[liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
}
}
@end

View File

@ -58,6 +58,7 @@ objc_library(
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/tasks/ios/common:MPPCommon",
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/core:MPPTaskRunner",
"//third_party/apple_frameworks:UIKit",
"@com_google_absl//absl/status:statusor",

View File

@ -141,6 +141,20 @@ NS_ASSUME_NONNULL_BEGIN
(mediapipe::tasks::core::PacketsCallback)packetsCallback
error:(NSError **)error NS_UNAVAILABLE;
/**
* Returns a unique dispatch queue name by appending the given suffix and a `UUID` to the
* pre-defined queue name prefix for vision tasks. Vision tasks can use this method to obtain
* dispatch queue names that are consistent with those of the other vision tasks.
* Dispatch queue names need not be unique, but keeping them unique makes debugging easier.
*
* @param suffix A suffix that identifies a dispatch queue's functionality.
*
* @return A unique dispatch queue name formed by appending the given suffix and a `UUID` to the
* pre-defined queue name prefix for vision tasks.
*/
+ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
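For illustration, a minimal sketch (not part of this diff) of how a vision task can use this helper to create the serial queue for its delegate callbacks, mirroring the object detector changes later in this PR; `kTaskName` stands in for the task's own suffix constant:

static NSString *const kTaskName = @"objectDetector";

// NULL attributes yield a serial queue; the unique label makes the queue easy to identify
// while debugging.
dispatch_queue_t callbackQueue = dispatch_queue_create(
    [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL);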

View File

@ -16,6 +16,7 @@
#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
#include "absl/status/statusor.h"
@ -37,6 +38,8 @@ static const NSInteger kMPPOrientationDegreesDown = -180;
/** Rotation degrees for a 90 degree rotation to the left. */
static const NSInteger kMPPOrientationDegreesLeft = -270;
static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision";
@interface MPPVisionTaskRunner () {
MPPRunningMode _runningMode;
}
@ -54,18 +57,21 @@ static const NSInteger kMPPOrientationDegreesLeft = -270;
if (packetsCallback) {
[MPPCommonUtils createCustomError:error
withCode:MPPTasksErrorCodeInvalidArgumentError
description:@"The vision task is in image or video mode, a "
@"user-defined result callback should not be provided."];
description:@"The vision task is in image or video mode. The "
@"delegate must not be set in the task's options."];
return nil;
}
break;
}
case MPPRunningModeLiveStream: {
if (!packetsCallback) {
[MPPCommonUtils createCustomError:error
[MPPCommonUtils
createCustomError:error
withCode:MPPTasksErrorCodeInvalidArgumentError
description:@"The vision task is in live stream mode, a user-defined "
@"result callback must be provided."];
description:
@"The vision task is in live stream mode. An object must be set as the "
@"delegate of the task in its options to ensure asynchronous delivery of "
@"results."];
return nil;
}
break;
@ -197,4 +203,9 @@ static const NSInteger kMPPOrientationDegreesLeft = -270;
return [self sendPacketMap:packetMap error:error];
}
+ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix {
return [NSString stringWithFormat:@"%@.%@_%@", kTaskPrefix, suffix, [NSString uuidString]]
.UTF8String;
}
@end

View File

@ -162,6 +162,7 @@ using ::mediapipe::ImageFrame;
OSType pixelBufferFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
switch (pixelBufferFormat) {
case kCVPixelFormatType_32RGBA:
case kCVPixelFormatType_32BGRA: {
return [MPPCVPixelBufferUtils rgbImageFrameFromCVPixelBuffer:pixelBuffer error:error];
}
@ -169,7 +170,8 @@ using ::mediapipe::ImageFrame;
[MPPCommonUtils createCustomError:error
withCode:MPPTasksErrorCodeInvalidArgumentError
description:@"Unsupported pixel format for CVPixelBuffer. Supported "
@"pixel format types are kCVPixelFormatType_32BGRA"];
@"pixel format types are kCVPixelFormatType_32BGRA and "
@"kCVPixelFormatType_32RGBA"];
}
}

View File

@ -96,6 +96,15 @@ NS_SWIFT_NAME(ObjectDetector)
* `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeImage`.
*
* This method supports performing object detection on RGBA images. If your `MPPImage` has a
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
* underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space
* is RGB with an alpha channel.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
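As a hedged illustration of image-mode usage (the `MPPImage` and detection call shown are assumed from the MediaPipe iOS API and are not part of this diff):

// Assumes `objectDetector` was created with MPPRunningModeImage.
NSError *error = nil;
MPPImage *mppImage = [[MPPImage alloc] initWithUIImage:[UIImage imageNamed:@"cats_and_dogs.jpg"]
                                                 error:&error];

// Synchronous call; returns the detections for this single image, or nil on error.
MPPObjectDetectionResult *result = [objectDetector detectInImage:mppImage error:&error];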
@ -115,6 +124,15 @@ NS_SWIFT_NAME(ObjectDetector)
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeVideo`.
*
* This method supports performing object detection on RGBA images. If your `MPPImage` has a
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
* underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color space
* is RGB with an alpha channel.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
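A hedged video-mode sketch; `frameAtIndex:` is a hypothetical decoding helper, and the 40 ms step merely illustrates monotonically increasing timestamps:

// Assumes `objectDetector` was created with MPPRunningModeVideo.
for (NSInteger i = 0; i < frameCount; i++) {
  MPPImage *frame = [self frameAtIndex:i];  // hypothetical frame-decoding helper
  NSError *error = nil;
  MPPObjectDetectionResult *result =
      [objectDetector detectInVideoFrame:frame
                 timestampInMilliseconds:i * 40  // must increase monotonically
                                   error:&error];
  // Consume `result` for frame i.
}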
@ -135,12 +153,28 @@ NS_SWIFT_NAME(ObjectDetector)
* Sends live stream image data of type `MPPImage` to perform object detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPObjectDetectorOptions`.
* `MPPRunningModeLiveStream`.
*
* The object that needs to be continuously notified of the available results of object detection
* must conform to the `MPPObjectDetectorLiveStreamDelegate` protocol and implement the
* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing.
*
* This method supports performing object detection on RGBA images. If your `MPPImage` has a
* source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
* underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage`, ensure that the color
* space is RGB with an alpha channel.
*
* If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure
* that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32BGRA` using
* its `videoSettings` property.
*
* @param image A live stream image data of type `MPPImage` on which object detection is to be
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
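A hedged sketch of live stream usage from an `AVCaptureVideoDataOutput` sample buffer delegate; the `MPPImage` sample-buffer initializer and the property names are assumptions, not part of this diff:

// Assumes the capture output's `videoSettings` request kCVPixelFormatType_32BGRA frames and
// that `self.objectDetector` was created with MPPRunningModeLiveStream and a delegate set.
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSError *error = nil;
  MPPImage *image = [[MPPImage alloc] initWithSampleBuffer:sampleBuffer error:&error];

  // Timestamps must be monotonically increasing; derive them from the frame's presentation time.
  NSInteger timestampInMilliseconds =
      (NSInteger)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1000);

  // Returns immediately; results are delivered asynchronously to the
  // MPPObjectDetectorLiveStreamDelegate configured in the options.
  [self.objectDetector detectAsyncInImage:image
                  timestampInMilliseconds:timestampInMilliseconds
                                    error:&error];
}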

View File

@ -37,8 +37,8 @@ static NSString *const kImageOutStreamName = @"image_out";
static NSString *const kImageTag = @"IMAGE";
static NSString *const kNormRectStreamName = @"norm_rect_in";
static NSString *const kNormRectTag = @"NORM_RECT";
static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorGraph";
static NSString *const kTaskName = @"objectDetector";
#define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \
@ -51,6 +51,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
/** iOS Vision Task Runner */
MPPVisionTaskRunner *_visionTaskRunner;
}
@property(nonatomic, weak) id<MPPObjectDetectorLiveStreamDelegate> objectDetectorLiveStreamDelegate;
@end
@implementation MPPObjectDetector
@ -78,11 +79,37 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
PacketsCallback packetsCallback = nullptr;
if (options.completion) {
if (options.objectDetectorLiveStreamDelegate) {
_objectDetectorLiveStreamDelegate = options.objectDetectorLiveStreamDelegate;
// Capture `self` weakly so that the C++ packets callback does not retain `self` and create a
// retain cycle after the client releases the detector.
MPPObjectDetector *__weak weakSelf = self;
// Create a private serial dispatch queue on which the delegate method will be called
// asynchronously. This ensures that if the client performs a long-running operation in the
// delegate method, the queue on which the C++ callbacks are invoked is not blocked and is
// freed up to continue with its operations.
dispatch_queue_t callbackQueue = dispatch_queue_create(
[MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL);
packetsCallback = [=](absl::StatusOr<PacketMap> statusOrPackets) {
if (!weakSelf) {
return;
}
if (![weakSelf.objectDetectorLiveStreamDelegate
respondsToSelector:@selector
(objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
return;
}
NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:statusOrPackets.status() toError:&callbackError]) {
options.completion(nil, Timestamp::Unset().Value(), callbackError);
dispatch_async(callbackQueue, ^{
[weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
didFinishDetectionWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
});
return;
}
@ -95,10 +122,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
objectDetectionResultWithDetectionsPacket:statusOrPackets.value()[kDetectionsStreamName
.cppString]];
options.completion(result,
NSInteger timeStampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicroSecondsPerMilliSecond,
callbackError);
kMicroSecondsPerMilliSecond;
dispatch_async(callbackQueue, ^{
[weakSelf.objectDetectorLiveStreamDelegate objectDetector:weakSelf
didFinishDetectionWithResult:result
timestampInMilliseconds:timeStampInMilliseconds
error:callbackError];
});
};
}
@ -112,6 +144,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return nil;
}
}
return self;
}
@ -224,5 +257,4 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
}
@end

View File

@ -20,19 +20,70 @@
NS_ASSUME_NONNULL_BEGIN
@class MPPObjectDetector;
/**
* This protocol defines an interface for the delegates of an `MPPObjectDetector` object to receive
* results of performing asynchronous object detection on images (i.e., when `runningMode` =
* `MPPRunningModeLiveStream`).
*
* The delegate of `MPPObjectDetector` must adopt the `MPPObjectDetectorLiveStreamDelegate`
* protocol. The methods in this protocol are optional.
*/
NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
@protocol MPPObjectDetectorLiveStreamDelegate <NSObject>
@optional
/**
* This method notifies the delegate that the results of asynchronous object detection on
* an image submitted to the `MPPObjectDetector` are available.
*
* This method is called on a private serial dispatch queue created by the `MPPObjectDetector`
* for performing the asynchronous delegate calls.
*
* @param objectDetector The object detector that performed the object detection.
* This is useful for testing equality when there are multiple instances of `MPPObjectDetector`.
* @param result The `MPPObjectDetectionResult` object that contains a list of detections. Each
* detection has a bounding box that is expressed in the unrotated input frame of reference
* coordinate system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of
* the underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the object detector.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input live stream image data.
*
*/
- (void)objectDetector:(MPPObjectDetector *)objectDetector
didFinishDetectionWithResult:(nullable MPPObjectDetectionResult *)result
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(nullable NSError *)error
NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:));
@end
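For reference, a minimal conforming implementation, modeled on the updated tests in this PR; the class name is illustrative:

@interface MPPMyDetectionHandler : NSObject <MPPObjectDetectorLiveStreamDelegate>
@end

@implementation MPPMyDetectionHandler

- (void)objectDetector:(MPPObjectDetector *)objectDetector
    didFinishDetectionWithResult:(MPPObjectDetectionResult *)result
         timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                           error:(NSError *)error {
  if (error) {
    // Handle the detection error.
    return;
  }
  // Consume the detections for the frame submitted at `timestampInMilliseconds`.
}

@end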
/** Options for setting up a `MPPObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorOptions)
@interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the object detector task. Defaults to `MPPRunningModeImage`.
* `MPPObjectDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing object detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing object detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing object detection on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* The user-defined result callback for processing live stream data. The result callback should only
* be specified when the running mode is set to the live stream mode.
* TODO: Add parameter `MPPImage` in the callback.
* An object that conforms to the `MPPObjectDetectorLiveStreamDelegate` protocol. This object must
* implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to
* receive the results of performing asynchronous object detection on images (i.e., when
* `runningMode` = `MPPRunningModeLiveStream`).
*/
@property(nonatomic, copy) void (^completion)
(MPPObjectDetectionResult *__nullable result, NSInteger timestampMs, NSError *error);
@property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
objectDetectorLiveStreamDelegate;
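A short configuration sketch mirroring the updated tests; `modelPath` and `handler` are illustrative placeholders:

MPPObjectDetectorOptions *options = [[MPPObjectDetectorOptions alloc] init];
options.baseOptions.modelAssetPath = modelPath;
options.runningMode = MPPRunningModeLiveStream;
options.objectDetectorLiveStreamDelegate = handler;

NSError *error = nil;
MPPObjectDetector *objectDetector = [[MPPObjectDetector alloc] initWithOptions:options
                                                                          error:&error];

Because the delegate property is weak, the client must keep a strong reference to `handler` for as long as it expects to receive results.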
/**
* The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults

View File

@ -33,7 +33,7 @@
objectDetectorOptions.categoryDenylist = self.categoryDenylist;
objectDetectorOptions.categoryAllowlist = self.categoryAllowlist;
objectDetectorOptions.displayNamesLocale = self.displayNamesLocale;
objectDetectorOptions.completion = self.completion;
objectDetectorOptions.objectDetectorLiveStreamDelegate = self.objectDetectorLiveStreamDelegate;
return objectDetectorOptions;
}