diff --git a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm
index 548c4bdbf..752f4bfb9 100644
--- a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm
+++ b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm
@@ -109,7 +109,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   NSError *error;
   MPPImage *mppImage = [self imageWithFileInfo:kCatImage];

-  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInImage:mppImage error:&error];
+  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectImage:mppImage error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceDetectorResult);
   XCTAssertEqual(faceDetectorResult.detections.count, 0);
@@ -125,9 +125,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];

   for (int i = 0; i < 3; i++) {
-    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInVideoFrame:image
-                                                         timestampInMilliseconds:i
-                                                                           error:nil];
+    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectVideoFrame:image
+                                                       timestampInMilliseconds:i
+                                                                         error:nil];
     [self assertFaceDetectorResult:faceDetectorResult
         containsExpectedKeypoints:kPortraitExpectedKeypoints];
   }
@@ -141,9 +141,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitRotatedImage];

   for (int i = 0; i < 3; i++) {
-    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInVideoFrame:image
-                                                         timestampInMilliseconds:i
-                                                                           error:nil];
+    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectVideoFrame:image
+                                                       timestampInMilliseconds:i
+                                                                         error:nil];
     [self assertFaceDetectorResult:faceDetectorResult
         containsExpectedKeypoints:kPortraitRotatedExpectedKeypoints];
   }
@@ -181,7 +181,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   };

   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }

   NSTimeInterval timeout = 0.5f;
@@ -205,10 +205,10 @@ static const float kKeypointErrorThreshold = 1e-2;
   };

   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
-  XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+  XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:1 error:nil]);

   NSError *error;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image timestampInMilliseconds:0 error:&error]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error]);

   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -274,9 +274,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];

   NSError *liveStreamApiCallError;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image
-                           timestampInMilliseconds:0
-                                             error:&liveStreamApiCallError]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image
+                         timestampInMilliseconds:0
+                                           error:&liveStreamApiCallError]);

   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -288,9 +288,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);

   NSError *videoApiCallError;
-  XCTAssertFalse([faceDetector detectInVideoFrame:image
-                           timestampInMilliseconds:0
-                                             error:&videoApiCallError]);
+  XCTAssertFalse([faceDetector detectVideoFrame:image
+                         timestampInMilliseconds:0
+                                           error:&videoApiCallError]);

   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -312,9 +312,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];

   NSError *liveStreamApiCallError;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image
-                           timestampInMilliseconds:0
-                                             error:&liveStreamApiCallError]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image
+                         timestampInMilliseconds:0
+                                           error:&liveStreamApiCallError]);

   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -326,7 +326,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);

   NSError *imageApiCallError;
-  XCTAssertFalse([faceDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([faceDetector detectImage:image error:&imageApiCallError]);

   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -350,7 +350,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];

   NSError *imageApiCallError;
-  XCTAssertFalse([faceDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([faceDetector detectImage:image error:&imageApiCallError]);

   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -362,9 +362,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);

   NSError *videoApiCallError;
-  XCTAssertFalse([faceDetector detectInVideoFrame:image
-                           timestampInMilliseconds:0
-                                             error:&videoApiCallError]);
+  XCTAssertFalse([faceDetector detectVideoFrame:image
+                         timestampInMilliseconds:0
+                                           error:&videoApiCallError]);

   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -407,7 +407,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];

   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }

   NSTimeInterval timeout = 0.5f;
@@ -503,7 +503,7 @@ static const float kKeypointErrorThreshold = 1e-2;
                    usingFaceDetector:(MPPFaceDetector *)faceDetector
            containsExpectedKeypoints:(NSArray *)expectedKeypoints {
   NSError *error;
-  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInImage:mppImage error:&error];
+  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectImage:mppImage error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceDetectorResult);
   [self assertFaceDetectorResult:faceDetectorResult containsExpectedKeypoints:expectedKeypoints];
diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h
index 8adb40679..89cf3d2d3 100644
--- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h
+++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h
@@ -100,7 +100,7 @@ NS_SWIFT_NAME(FaceDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectImage:(MPPImage *)image
                                             error:(NSError **)error NS_SWIFT_NAME(detect(image:));

 /**
@@ -127,7 +127,7 @@ NS_SWIFT_NAME(FaceDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectVideoFrame:(MPPImage *)image
                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                  error:(NSError **)error
     NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
@@ -165,7 +165,7 @@ NS_SWIFT_NAME(FaceDetector)
  *
  * @return `true` if the image was sent to the task successfully, otherwise `false`.
  */
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error
     NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm
index 8e82281b4..96434eb47 100644
--- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm
+++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm
@@ -130,13 +130,13 @@ static NSString *const kTaskName = @"faceDetector";
   return [self initWithOptions:options error:error];
 }

-- (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
+- (nullable MPPFaceDetectorResult *)detectImage:(MPPImage *)image error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];

   return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }

-- (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectVideoFrame:(MPPImage *)image
                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                  error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap =
@@ -147,7 +147,7 @@ static NSString *const kTaskName = @"faceDetector";
   return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }

-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
   return [_visionTaskRunner processLiveStreamImage:image
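Reviewer note: this patch only renames the Objective-C selectors (detectInImage: -> detectImage:, detectInVideoFrame: -> detectVideoFrame:, detectAsyncInImage: -> detectAsyncImage:). The NS_SWIFT_NAME annotations are untouched, so Swift call sites keep compiling as detect(image:), detect(videoFrame:timestampInMilliseconds:), and detectAsync(image:timestampInMilliseconds:); only Objective-C clients need source changes. A minimal migration sketch follows; `modelPath` and `image` are illustrative placeholders, not taken from this patch, and each running mode would in practice need its own detector instance since the mode is fixed at init time:

// Migration sketch (hypothetical call sites; only the selector names
// come from this patch).
MPPFaceDetectorOptions *options = [[MPPFaceDetectorOptions alloc] init];
options.baseOptions.modelAssetPath = modelPath;
options.runningMode = MPPRunningModeImage;

NSError *error = nil;
MPPFaceDetector *faceDetector = [[MPPFaceDetector alloc] initWithOptions:options
                                                                   error:&error];

// Before: [faceDetector detectInImage:image error:&error]
MPPFaceDetectorResult *result = [faceDetector detectImage:image error:&error];

// Before: [faceDetector detectInVideoFrame:image timestampInMilliseconds:0 error:&error]
// (requires a detector created with MPPRunningModeVideo)
result = [faceDetector detectVideoFrame:image timestampInMilliseconds:0 error:&error];

// Before: [faceDetector detectAsyncInImage:image timestampInMilliseconds:0 error:&error]
// (requires MPPRunningModeLiveStream and a faceDetectorLiveStreamDelegate)
BOOL sent = [faceDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error];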