Merge pull request #4523 from priankakariatyml:ios-gesture-recognizer-add-tests

PiperOrigin-RevId: 538848389
Copybara-Service 2023-06-08 11:48:18 -07:00
commit 4c4a1d93b2
4 changed files with 442 additions and 17 deletions


@@ -48,6 +48,9 @@ static const NSInteger kGestureExpectedIndex = -1;
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
static const float kLandmarksErrorTolerance = 0.03f;
static NSString *const kLiveStreamTestsDictGestureRecognizerKey = @"gesture_recognizer";
static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
#define AssertEqualErrors(error, expectedError) \
XCTAssertNotNil(error); \
XCTAssertEqualObjects(error.domain, expectedError.domain); \
@@ -72,12 +75,15 @@ static const float kLandmarksErrorTolerance = 0.03f;
XCTAssertTrue(gestureRecognizerResult.landmarks.count == 0); \
XCTAssertTrue(gestureRecognizerResult.worldLandmarks.count == 0);
@interface MPPGestureRecognizerTests : XCTestCase
@interface MPPGestureRecognizerTests : XCTestCase <MPPGestureRecognizerLiveStreamDelegate> {
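// Each live stream test stores the gesture recognizer under test and its XCTestExpectation in one
// of these dictionaries so the shared delegate callback can fulfill the correct expectation.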
NSDictionary<NSString *, id> *_liveStreamSucceedsTestDict;
NSDictionary<NSString *, id> *_outOfOrderTimestampTestDict;
}
@end
@implementation MPPGestureRecognizerTests
#pragma mark Results
#pragma mark Expected Results
+ (MPPGestureRecognizerResult *)emptyGestureRecognizerResult {
return [[MPPGestureRecognizerResult alloc] initWithGestures:@[]
@@ -106,6 +112,8 @@ static const float kLandmarksErrorTolerance = 0.03f;
shouldRemoveZPosition:YES];
}
#pragma mark Assert Gesture Recognizer Results
- (void)assertMultiHandLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)multiHandLandmarks
areApproximatelyEqualToExpectedMultiHandLandmarks:
(NSArray<NSArray<MPPNormalizedLandmark *> *> *)expectedMultiHandLandmarks {
@@ -175,6 +183,16 @@ static const float kLandmarksErrorTolerance = 0.03f;
areApproximatelyEqualToExpectedMultiHandGestures:expectedGestureRecognizerResult.gestures];
}
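// Recognizes the image described by `fileInfo` with the given gesture recognizer and asserts that
// the result approximately matches the expected result.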
- (void)assertResultsOfRecognizeImageWithFileInfo:(ResourceFileInfo *)fileInfo
usingGestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer
approximatelyEqualsGestureRecognizerResult:
(MPPGestureRecognizerResult *)expectedGestureRecognizerResult {
MPPGestureRecognizerResult *gestureRecognizerResult =
[self recognizeImageWithFileInfo:fileInfo usingGestureRecognizer:gestureRecognizer];
[self assertGestureRecognizerResult:gestureRecognizerResult
isApproximatelyEqualToExpectedResult:expectedGestureRecognizerResult];
}
#pragma mark File
+ (NSString *)filePathWithFileInfo:(ResourceFileInfo *)fileInfo {
@@ -221,7 +239,7 @@ static const float kLandmarksErrorTolerance = 0.03f;
AssertEqualErrors(error, expectedError);
}
#pragma mark Assert Gesture Recognizer Results
#pragma mark Recognize Helpers
- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo {
MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPGestureRecognizerTests class]
@@ -254,16 +272,6 @@ static const float kLandmarksErrorTolerance = 0.03f;
return gestureRecognizerResult;
}
- (void)assertResultsOfRecognizeImageWithFileInfo:(ResourceFileInfo *)fileInfo
usingGestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer
approximatelyEqualsGestureRecognizerResult:
(MPPGestureRecognizerResult *)expectedGestureRecognizerResult {
MPPGestureRecognizerResult *gestureRecognizerResult =
[self recognizeImageWithFileInfo:fileInfo usingGestureRecognizer:gestureRecognizer];
[self assertGestureRecognizerResult:gestureRecognizerResult
isApproximatelyEqualToExpectedResult:expectedGestureRecognizerResult];
}
#pragma mark General Tests
- (void)testRecognizeWithModelPathSucceeds {
@@ -279,4 +287,421 @@ static const float kLandmarksErrorTolerance = 0.03f;
thumbUpGestureRecognizerResult]];
}
- (void)testRecognizeWithEmptyResultsSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
MPPGestureRecognizerResult *gestureRecognizerResult =
[self recognizeImageWithFileInfo:kNoHandsImage usingGestureRecognizer:gestureRecognizer];
AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult);
}
- (void)testRecognizeWithScoreThresholdSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init];
gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
MPPGestureRecognizerResult *gestureRecognizerResult =
[self recognizeImageWithFileInfo:kThumbUpImage usingGestureRecognizer:gestureRecognizer];
MPPGestureRecognizerResult *expectedGestureRecognizerResult =
[MPPGestureRecognizerTests thumbUpGestureRecognizerResult];
XCTAssertTrue(gestureRecognizerResult.gestures.count == 1);
AssertEqualGestures(gestureRecognizerResult.gestures[0][0],
expectedGestureRecognizerResult.gestures[0][0], 0, 0);
}
- (void)testRecognizeWithNumHandsSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
const NSInteger numHands = 2;
gestureRecognizerOptions.numHands = numHands;
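// With `numHands` set to 2, both hands in the two-hands test image should be detected.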
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
MPPGestureRecognizerResult *gestureRecognizerResult =
[self recognizeImageWithFileInfo:kTwoHandsImage usingGestureRecognizer:gestureRecognizer];
XCTAssertTrue(gestureRecognizerResult.handedness.count == numHands);
}
- (void)testRecognizeWithRotationSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
gestureRecognizerOptions.numHands = 1;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
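// The test image is rotated; passing UIImageOrientationRight lets the recognizer account for the
// rotation and still detect the pointing-up gesture.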
MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage
orientation:UIImageOrientationRight];
MPPGestureRecognizerResult *gestureRecognizerResult = [gestureRecognizer recognizeImage:mppImage
error:nil];
XCTAssertNotNil(gestureRecognizerResult);
XCTAssertEqual(gestureRecognizerResult.gestures.count, 1);
XCTAssertEqualObjects(gestureRecognizerResult.gestures[0][0].categoryName,
kExpectedPointingUpLabel);
}
- (void)testRecognizeWithCannedGestureFistSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
gestureRecognizerOptions.numHands = 1;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
[self assertResultsOfRecognizeImageWithFileInfo:kFistImage
usingGestureRecognizer:gestureRecognizer
approximatelyEqualsGestureRecognizerResult:
[MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]];
}
- (void)testRecognizeWithAllowGestureFistSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init];
gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f;
gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ];
gestureRecognizerOptions.numHands = 1;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
[self assertResultsOfRecognizeImageWithFileInfo:kFistImage
usingGestureRecognizer:gestureRecognizer
approximatelyEqualsGestureRecognizerResult:
[MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]];
}
- (void)testRecognizeWithDenyGestureFistSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init];
gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f;
gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ];
gestureRecognizerOptions.numHands = 1;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
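// With the fist gesture denylisted, recognizing the fist image should produce an empty result.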
MPPGestureRecognizerResult *gestureRecognizerResult =
[self recognizeImageWithFileInfo:kFistImage usingGestureRecognizer:gestureRecognizer];
AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult);
}
- (void)testRecognizeWithPreferAllowlistOverDenylistSucceeds {
MPPGestureRecognizerOptions *gestureRecognizerOptions =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init];
gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f;
gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ];
gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ];
gestureRecognizerOptions.numHands = 1;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions];
[self assertResultsOfRecognizeImageWithFileInfo:kFistImage
usingGestureRecognizer:gestureRecognizer
approximatelyEqualsGestureRecognizerResult:
[MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]];
}
#pragma mark Running Mode Tests
- (void)testCreateGestureRecognizerFailsWithDelegateInNonLiveStreamMode {
MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo};
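// A live stream delegate may only be set in live stream mode; creation must fail for both image
// and video modes.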
for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = runningModesToTest[i];
options.gestureRecognizerLiveStreamDelegate = self;
[self assertCreateGestureRecognizerWithOptions:options
failsWithExpectedError:
[NSError
errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in image or video mode. The "
@"delegate must not be set in the task's options."
}]];
}
}
- (void)testCreateGestureRecognizerFailsWithMissingDelegateInLiveStreamMode {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = MPPRunningModeLiveStream;
[self
assertCreateGestureRecognizerWithOptions:options
failsWithExpectedError:
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in live stream mode. An "
@"object must be set as the delegate of the task "
@"in its options to ensure asynchronous delivery "
@"of results."
}]];
}
- (void)testRecognizeFailsWithCallingWrongApiInImageMode {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kFistImage];
NSError *liveStreamApiCallError;
XCTAssertFalse([gestureRecognizer recognizeAsyncImage:image
timestampInMilliseconds:0
error:&liveStreamApiCallError]);
NSError *expectedLiveStreamApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with live "
@"stream mode. Current Running Mode: Image"
}];
AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
NSError *videoApiCallError;
XCTAssertFalse([gestureRecognizer recognizeVideoFrame:image
timestampInMilliseconds:0
error:&videoApiCallError]);
NSError *expectedVideoApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"video mode. Current Running Mode: Image"
}];
AssertEqualErrors(videoApiCallError, expectedVideoApiCallError);
}
- (void)testRecognizeFailsWithCallingWrongApiInVideoMode {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = MPPRunningModeVideo;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kFistImage];
NSError *liveStreamApiCallError;
XCTAssertFalse([gestureRecognizer recognizeAsyncImage:image
timestampInMilliseconds:0
error:&liveStreamApiCallError]);
NSError *expectedLiveStreamApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with live "
@"stream mode. Current Running Mode: Video"
}];
AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
NSError *imageApiCallError;
XCTAssertFalse([gestureRecognizer recognizeImage:image error:&imageApiCallError]);
NSError *expectedImageApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"image mode. Current Running Mode: Video"
}];
AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
}
- (void)testRecognizeFailsWithCallingWrongApiInLiveStreamMode {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = MPPRunningModeLiveStream;
options.gestureRecognizerLiveStreamDelegate = self;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kFistImage];
NSError *imageApiCallError;
XCTAssertFalse([gestureRecognizer recognizeImage:image error:&imageApiCallError]);
NSError *expectedImageApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"image mode. Current Running Mode: Live Stream"
}];
AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
NSError *videoApiCallError;
XCTAssertFalse([gestureRecognizer recognizeVideoFrame:image
timestampInMilliseconds:0
error:&videoApiCallError]);
NSError *expectedVideoApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"video mode. Current Running Mode: Live Stream"
}];
AssertEqualErrors(videoApiCallError, expectedVideoApiCallError);
}
- (void)testRecognizeWithVideoModeSucceeds {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = MPPRunningModeVideo;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
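// Recognize the same frame at three monotonically increasing timestamps; each call should return
// the expected thumbs-up result.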
for (int i = 0; i < 3; i++) {
MPPGestureRecognizerResult *gestureRecognizerResult =
[gestureRecognizer recognizeVideoFrame:image timestampInMilliseconds:i error:nil];
[self assertGestureRecognizerResult:gestureRecognizerResult
isApproximatelyEqualToExpectedResult:[MPPGestureRecognizerTests
thumbUpGestureRecognizerResult]];
}
}
- (void)testRecognizeWithOutOfOrderTimestampsAndLiveStreamModeFails {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = MPPRunningModeLiveStream;
options.gestureRecognizerLiveStreamDelegate = self;
XCTestExpectation *expectation = [[XCTestExpectation alloc]
initWithDescription:@"recognizeWithOutOfOrderTimestampsAndLiveStream"];
expectation.expectedFulfillmentCount = 1;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:options];
_outOfOrderTimestampTestDict = @{
kLiveStreamTestsDictGestureRecognizerKey : gestureRecognizer,
kLiveStreamTestsDictExpectationKey : expectation
};
MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
XCTAssertTrue([gestureRecognizer recognizeAsyncImage:image timestampInMilliseconds:1 error:nil]);
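// A second frame with an earlier timestamp (0 < 1) must be rejected, since live stream input
// timestamps are required to be monotonically increasing.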
NSError *error;
XCTAssertFalse([gestureRecognizer recognizeAsyncImage:image
timestampInMilliseconds:0
error:&error]);
NSError *expectedError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
}];
AssertEqualErrors(error, expectedError);
NSTimeInterval timeout = 0.5f;
[self waitForExpectations:@[ expectation ] timeout:timeout];
}
- (void)testRecognizeWithLiveStreamModeSucceeds {
MPPGestureRecognizerOptions *options =
[self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile];
options.runningMode = MPPRunningModeLiveStream;
options.gestureRecognizerLiveStreamDelegate = self;
NSInteger iterationCount = 100;
// Because of flow limiting, we cannot ensure that the callback will be invoked `iterationCount`
// times. A normal expectation will fail if expectation.fulfill() is not called
// `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will
// only succeed if the expectation is not fulfilled for the specified `expectedFulfillmentCount`.
// Since we cannot predict how many times the expectation will be fulfilled, setting
// `expectation.expectedFulfillmentCount = iterationCount + 1` and `expectation.isInverted = true`
// ensures that the test succeeds if the expectation is fulfilled at most `iterationCount` times.
XCTestExpectation *expectation =
[[XCTestExpectation alloc] initWithDescription:@"recognizeWithLiveStream"];
expectation.expectedFulfillmentCount = iterationCount + 1;
expectation.inverted = YES;
MPPGestureRecognizer *gestureRecognizer =
[self createGestureRecognizerWithOptionsSucceeds:options];
_liveStreamSucceedsTestDict = @{
kLiveStreamTestsDictGestureRecognizerKey : gestureRecognizer,
kLiveStreamTestsDictExpectationKey : expectation
};
// TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
// with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
// `CMSampleBuffer`.
MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
for (int i = 0; i < iterationCount; i++) {
XCTAssertTrue([gestureRecognizer recognizeAsyncImage:image
timestampInMilliseconds:i
error:nil]);
}
NSTimeInterval timeout = 0.5f;
[self waitForExpectations:@[ expectation ] timeout:timeout];
}
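// MPPGestureRecognizerLiveStreamDelegate callback shared by the live stream tests above. It
// verifies the result against the expected thumbs-up result and fulfills the expectation of
// whichever test created the gesture recognizer that delivered this callback.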
- (void)gestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer
didFinishRecognitionWithResult:(MPPGestureRecognizerResult *)gestureRecognizerResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError *)error {
[self assertGestureRecognizerResult:gestureRecognizerResult
isApproximatelyEqualToExpectedResult:[MPPGestureRecognizerTests
thumbUpGestureRecognizerResult]];
if (gestureRecognizer == _outOfOrderTimestampTestDict[kLiveStreamTestsDictGestureRecognizerKey]) {
[_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
} else if (gestureRecognizer ==
_liveStreamSucceedsTestDict[kLiveStreamTestsDictGestureRecognizerKey]) {
[_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
}
}
@end


@@ -59,7 +59,7 @@ using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
handednessProto:{landmarkDetectionResultProto.classifications()}
handLandmarksProto:{landmarkDetectionResultProto.landmarks()}
worldLandmarksProto:{landmarkDetectionResultProto.world_landmarks()}
timestampInMilliSeconds:0];
timestampInMilliseconds:0];
}
@end


@@ -67,7 +67,7 @@ static const int kMicroSecondsPerMilliSecond = 1000;
handLandmarksProto
worldLandmarksProto:
(const std::vector<::mediapipe::LandmarkList> &)worldLandmarksProto
timestampInMilliSeconds:(NSInteger)timestampInMilliseconds;
timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
@end
NS_ASSUME_NONNULL_END


@@ -46,7 +46,7 @@ static const NSInteger kDefaultGestureIndex = -1;
handLandmarksProto
worldLandmarksProto:
(const std::vector<LandmarkListProto> &)worldLandmarksProto
timestampInMilliSeconds:(NSInteger)timestampInMilliseconds {
timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
NSMutableArray<NSMutableArray<MPPCategory *> *> *multiHandGestures =
[NSMutableArray arrayWithCapacity:(NSUInteger)handGesturesProto.size()];
@@ -141,7 +141,7 @@ static const NSInteger kDefaultGestureIndex = -1;
std::vector<NormalizedLandmarkListProto>>()
worldLandmarksProto:worldLandmarksPacket
.Get<std::vector<LandmarkListProto>>()
timestampInMilliSeconds:timestampInMilliseconds];
timestampInMilliseconds:timestampInMilliseconds];
}
@end