Updated iOS Face Detector Objective C API names

Prianka Liz Kariat 2023-09-23 00:07:20 +05:30
parent 34cedb980b
commit d7c57e4eda
3 changed files with 32 additions and 32 deletions
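The rename drops the "In" from the three detection entry points: `detectInImage:error:` becomes `detectImage:error:`, `detectInVideoFrame:timestampInMilliseconds:error:` becomes `detectVideoFrame:timestampInMilliseconds:error:`, and `detectAsyncInImage:timestampInMilliseconds:error:` becomes `detectAsyncImage:timestampInMilliseconds:error:`. The following is a minimal Objective-C sketch of an image-mode call site after the rename; the framework import, the options setup, the model and image names, and the `initWithUIImage:error:` initializer are illustrative assumptions and are not part of this diff.

#import <MediaPipeTasksVision/MediaPipeTasksVision.h>  // Assumed umbrella import for the Tasks Vision framework.

// Assumption: a face detection model is bundled with the app under this name.
NSString *modelPath = [[NSBundle mainBundle] pathForResource:@"face_detection_short_range"
                                                      ofType:@"tflite"];
MPPFaceDetectorOptions *options = [[MPPFaceDetectorOptions alloc] init];
options.baseOptions.modelAssetPath = modelPath;

NSError *error;
MPPFaceDetector *faceDetector = [[MPPFaceDetector alloc] initWithOptions:options error:&error];

// Assumption: the input image comes from a bundled UIImage; the tests below use their own
// imageWithFileInfo: helper instead.
UIImage *uiImage = [UIImage imageNamed:@"portrait"];
MPPImage *image = [[MPPImage alloc] initWithUIImage:uiImage error:&error];

// Renamed API: previously -detectInImage:error:, now -detectImage:error:.
MPPFaceDetectorResult *result = [faceDetector detectImage:image error:&error];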

View File

@@ -109,7 +109,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   NSError *error;
   MPPImage *mppImage = [self imageWithFileInfo:kCatImage];
-  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInImage:mppImage error:&error];
+  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectImage:mppImage error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceDetectorResult);
   XCTAssertEqual(faceDetectorResult.detections.count, 0);
@@ -125,9 +125,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   for (int i = 0; i < 3; i++) {
-    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInVideoFrame:image
+    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectVideoFrame:image
                                                        timestampInMilliseconds:i
                                                                          error:nil];
     [self assertFaceDetectorResult:faceDetectorResult
          containsExpectedKeypoints:kPortraitExpectedKeypoints];
   }
@@ -141,9 +141,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitRotatedImage];
   for (int i = 0; i < 3; i++) {
-    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInVideoFrame:image
+    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectVideoFrame:image
                                                        timestampInMilliseconds:i
                                                                          error:nil];
     [self assertFaceDetectorResult:faceDetectorResult
          containsExpectedKeypoints:kPortraitRotatedExpectedKeypoints];
   }
@@ -181,7 +181,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   };
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
   NSTimeInterval timeout = 0.5f;
@@ -205,10 +205,10 @@ static const float kKeypointErrorThreshold = 1e-2;
   };
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
-  XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+  XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:1 error:nil]);
   NSError *error;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image timestampInMilliseconds:0 error:&error]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error]);
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -274,9 +274,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image
+  XCTAssertFalse([faceDetector detectAsyncImage:image
                          timestampInMilliseconds:0
                                            error:&liveStreamApiCallError]);
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -288,9 +288,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
   NSError *videoApiCallError;
-  XCTAssertFalse([faceDetector detectInVideoFrame:image
+  XCTAssertFalse([faceDetector detectVideoFrame:image
                          timestampInMilliseconds:0
                                            error:&videoApiCallError]);
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -312,9 +312,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image
+  XCTAssertFalse([faceDetector detectAsyncImage:image
                          timestampInMilliseconds:0
                                            error:&liveStreamApiCallError]);
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -326,7 +326,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
   NSError *imageApiCallError;
-  XCTAssertFalse([faceDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([faceDetector detectImage:image error:&imageApiCallError]);
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -350,7 +350,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   NSError *imageApiCallError;
-  XCTAssertFalse([faceDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([faceDetector detectImage:image error:&imageApiCallError]);
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -362,9 +362,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
   NSError *videoApiCallError;
-  XCTAssertFalse([faceDetector detectInVideoFrame:image
+  XCTAssertFalse([faceDetector detectVideoFrame:image
                          timestampInMilliseconds:0
                                            error:&videoApiCallError]);
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -407,7 +407,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
   NSTimeInterval timeout = 0.5f;
@@ -503,7 +503,7 @@ static const float kKeypointErrorThreshold = 1e-2;
                  usingFaceDetector:(MPPFaceDetector *)faceDetector
          containsExpectedKeypoints:(NSArray<NSArray *> *)expectedKeypoints {
   NSError *error;
-  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInImage:mppImage error:&error];
+  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectImage:mppImage error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceDetectorResult);
   [self assertFaceDetectorResult:faceDetectorResult containsExpectedKeypoints:expectedKeypoints];

View File

@@ -100,7 +100,7 @@ NS_SWIFT_NAME(FaceDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectImage:(MPPImage *)image
                                           error:(NSError **)error NS_SWIFT_NAME(detect(image:));

 /**
@@ -127,7 +127,7 @@ NS_SWIFT_NAME(FaceDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectVideoFrame:(MPPImage *)image
                        timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          error:(NSError **)error
     NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
@@ -165,7 +165,7 @@ NS_SWIFT_NAME(FaceDetector)
  *
  * @return `true` if the image was sent to the task successfully, otherwise `false`.
  */
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error
     NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
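The NS_SWIFT_NAME annotations above are unchanged, so Swift call sites keep `detect(image:)`, `detect(videoFrame:timestampInMilliseconds:)`, and `detectAsync(image:timestampInMilliseconds:)`. For Objective-C callers, a hedged sketch of the renamed video and live-stream entry points follows; the `faceDetector` and `image` variables and the timestamp value are assumed from context rather than taken from this diff.

NSError *error;

// Video mode, renamed from -detectInVideoFrame:timestampInMilliseconds:error:.
MPPFaceDetectorResult *videoResult = [faceDetector detectVideoFrame:image
                                            timestampInMilliseconds:0
                                                              error:&error];

// Live-stream mode, renamed from -detectAsyncInImage:timestampInMilliseconds:error:.
// Per the header above, the return value only reports whether the frame was sent to the
// task successfully; results are delivered asynchronously.
BOOL sent = [faceDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error];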

View File

@@ -130,13 +130,13 @@ static NSString *const kTaskName = @"faceDetector";
   return [self initWithOptions:options error:error];
 }

-- (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
+- (nullable MPPFaceDetectorResult *)detectImage:(MPPImage *)image error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
   return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }

-- (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectVideoFrame:(MPPImage *)image
                        timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap =
@@ -147,7 +147,7 @@ static NSString *const kTaskName = @"faceDetector";
   return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }

-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
   return [_visionTaskRunner processLiveStreamImage:image