Merge pull request #4829 from priankakariatyml:ios-vision-api-name-changes
PiperOrigin-RevId: 568622587
Commit 0417817886
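This change renames the Objective-C detection methods of the iOS vision tasks (FaceDetector, FaceLandmarker, HandLandmarker, ObjectDetector) and updates their tests: detectInImage: becomes detectImage:, detectInVideoFrame: becomes detectVideoFrame:, and detectAsyncInImage: becomes detectAsyncImage:. The Swift names declared via NS_SWIFT_NAME (detect(image:), detect(videoFrame:timestampInMilliseconds:), detectAsync(image:timestampInMilliseconds:)) are unchanged, so Swift call sites are unaffected. For Objective-C callers the change is a rename only; for example, for the face detector:

    // Before this change:
    MPPFaceDetectorResult *result = [faceDetector detectInImage:image error:&error];
    // After this change:
    MPPFaceDetectorResult *result = [faceDetector detectImage:image error:&error];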
				
			
@@ -109,7 +109,7 @@ static const float kKeypointErrorThreshold = 1e-2;
 
   NSError *error;
   MPPImage *mppImage = [self imageWithFileInfo:kCatImage];
-  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInImage:mppImage error:&error];
+  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectImage:mppImage error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceDetectorResult);
   XCTAssertEqual(faceDetectorResult.detections.count, 0);
@@ -125,9 +125,9 @@ static const float kKeypointErrorThreshold = 1e-2;
 
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   for (int i = 0; i < 3; i++) {
-    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInVideoFrame:image
-                                                         timestampInMilliseconds:i
-                                                                           error:nil];
+    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectVideoFrame:image
+                                                       timestampInMilliseconds:i
+                                                                         error:nil];
     [self assertFaceDetectorResult:faceDetectorResult
          containsExpectedKeypoints:kPortraitExpectedKeypoints];
   }
@@ -141,9 +141,9 @@ static const float kKeypointErrorThreshold = 1e-2;
 
   MPPImage *image = [self imageWithFileInfo:kPortraitRotatedImage];
   for (int i = 0; i < 3; i++) {
-    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInVideoFrame:image
-                                                         timestampInMilliseconds:i
-                                                                           error:nil];
+    MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectVideoFrame:image
+                                                       timestampInMilliseconds:i
+                                                                         error:nil];
     [self assertFaceDetectorResult:faceDetectorResult
          containsExpectedKeypoints:kPortraitRotatedExpectedKeypoints];
   }
@@ -181,7 +181,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   };
 
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 
   NSTimeInterval timeout = 0.5f;
@@ -205,10 +205,10 @@ static const float kKeypointErrorThreshold = 1e-2;
   };
 
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
-  XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+  XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:1 error:nil]);
 
   NSError *error;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image timestampInMilliseconds:0 error:&error]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error]);
 
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -274,9 +274,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
 
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image
-                          timestampInMilliseconds:0
-                                            error:&liveStreamApiCallError]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image
+                        timestampInMilliseconds:0
+                                          error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -288,9 +288,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([faceDetector detectInVideoFrame:image
-                          timestampInMilliseconds:0
-                                            error:&videoApiCallError]);
+  XCTAssertFalse([faceDetector detectVideoFrame:image
+                        timestampInMilliseconds:0
+                                          error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -312,9 +312,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
 
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([faceDetector detectAsyncInImage:image
-                          timestampInMilliseconds:0
-                                            error:&liveStreamApiCallError]);
+  XCTAssertFalse([faceDetector detectAsyncImage:image
+                        timestampInMilliseconds:0
+                                          error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -326,7 +326,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *imageApiCallError;
-  XCTAssertFalse([faceDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([faceDetector detectImage:image error:&imageApiCallError]);
 
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -350,7 +350,7 @@ static const float kKeypointErrorThreshold = 1e-2;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
 
   NSError *imageApiCallError;
-  XCTAssertFalse([faceDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([faceDetector detectImage:image error:&imageApiCallError]);
 
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -362,9 +362,9 @@ static const float kKeypointErrorThreshold = 1e-2;
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([faceDetector detectInVideoFrame:image
-                          timestampInMilliseconds:0
-                                            error:&videoApiCallError]);
+  XCTAssertFalse([faceDetector detectVideoFrame:image
+                        timestampInMilliseconds:0
+                                          error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -407,7 +407,7 @@ static const float kKeypointErrorThreshold = 1e-2;
 
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 
   NSTimeInterval timeout = 0.5f;
@@ -503,7 +503,7 @@ static const float kKeypointErrorThreshold = 1e-2;
                    usingFaceDetector:(MPPFaceDetector *)faceDetector
            containsExpectedKeypoints:(NSArray<NSArray *> *)expectedKeypoints {
   NSError *error;
-  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectInImage:mppImage error:&error];
+  MPPFaceDetectorResult *faceDetectorResult = [faceDetector detectImage:mppImage error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceDetectorResult);
   [self assertFaceDetectorResult:faceDetectorResult containsExpectedKeypoints:expectedKeypoints];

@@ -137,8 +137,8 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
 
   NSError *error;
   MPPImage *mppImage = [self imageWithFileInfo:kCatImage];
-  MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInImage:mppImage
-                                                                          error:&error];
+  MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectImage:mppImage
+                                                                        error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceLandmarkerResult);
   XCTAssertEqualObjects(faceLandmarkerResult.faceLandmarks, [NSArray array]);
@@ -158,9 +158,9 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   NSArray<MPPNormalizedLandmark *> *expectedLandmarks =
       [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName];
   for (int i = 0; i < 3; i++) {
-    MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInVideoFrame:image
-                                                               timestampInMilliseconds:i
-                                                                                 error:nil];
+    MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectVideoFrame:image
+                                                             timestampInMilliseconds:i
+                                                                               error:nil];
     [self assertFaceLandmarkerResult:faceLandmarkerResult
            containsExpectedLandmarks:expectedLandmarks
                  expectedBlendshapes:NULL
@@ -200,7 +200,7 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   };
 
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([faceLandmarker detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 
   NSTimeInterval timeout = 0.5f;
@@ -224,10 +224,10 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   };
 
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
-  XCTAssertTrue([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+  XCTAssertTrue([faceLandmarker detectAsyncImage:image timestampInMilliseconds:1 error:nil]);
 
   NSError *error;
-  XCTAssertFalse([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:0 error:&error]);
+  XCTAssertFalse([faceLandmarker detectAsyncImage:image timestampInMilliseconds:0 error:&error]);
 
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -292,9 +292,9 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
 
   NSError *liveStreamAPICallError;
-  XCTAssertFalse([faceLandmarker detectAsyncInImage:image
-                            timestampInMilliseconds:0
-                                              error:&liveStreamAPICallError]);
+  XCTAssertFalse([faceLandmarker detectAsyncImage:image
+                          timestampInMilliseconds:0
+                                            error:&liveStreamAPICallError]);
 
   NSError *expectedLiveStreamAPICallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -306,9 +306,9 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   AssertEqualErrors(liveStreamAPICallError, expectedLiveStreamAPICallError);
 
   NSError *videoAPICallError;
-  XCTAssertFalse([faceLandmarker detectInVideoFrame:image
-                            timestampInMilliseconds:0
-                                              error:&videoAPICallError]);
+  XCTAssertFalse([faceLandmarker detectVideoFrame:image
+                          timestampInMilliseconds:0
+                                            error:&videoAPICallError]);
 
   NSError *expectedVideoAPICallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -329,9 +329,9 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
 
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
   NSError *liveStreamAPICallError;
-  XCTAssertFalse([faceLandmarker detectAsyncInImage:image
-                            timestampInMilliseconds:0
-                                              error:&liveStreamAPICallError]);
+  XCTAssertFalse([faceLandmarker detectAsyncImage:image
+                          timestampInMilliseconds:0
+                                            error:&liveStreamAPICallError]);
 
   NSError *expectedLiveStreamAPICallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -343,7 +343,7 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   AssertEqualErrors(liveStreamAPICallError, expectedLiveStreamAPICallError);
 
   NSError *imageAPICallError;
-  XCTAssertFalse([faceLandmarker detectInImage:image error:&imageAPICallError]);
+  XCTAssertFalse([faceLandmarker detectImage:image error:&imageAPICallError]);
 
   NSError *expectedImageAPICallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -365,7 +365,7 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
 
   NSError *imageAPICallError;
-  XCTAssertFalse([faceLandmarker detectInImage:image error:&imageAPICallError]);
+  XCTAssertFalse([faceLandmarker detectImage:image error:&imageAPICallError]);
 
   NSError *expectedImageAPICallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -377,9 +377,9 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   AssertEqualErrors(imageAPICallError, expectedImageAPICallError);
 
   NSError *videoAPICallError;
-  XCTAssertFalse([faceLandmarker detectInVideoFrame:image
-                            timestampInMilliseconds:0
-                                              error:&videoAPICallError]);
+  XCTAssertFalse([faceLandmarker detectVideoFrame:image
+                          timestampInMilliseconds:0
+                                            error:&videoAPICallError]);
 
   NSError *expectedVideoAPICallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -484,7 +484,7 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
     }
   }
 
-  if (expectedTransformationMatrix == NULL) {
+  if (expectedTransformationMatrix == nullptr) {
     XCTAssertEqualObjects(faceLandmarkerResult.facialTransformationMatrixes, [NSArray array]);
   } else {
     MPPTransformMatrix *actualTransformationMatrix =
@@ -539,8 +539,8 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
   MPPImage *mppImage = [self imageWithFileInfo:fileInfo];
 
   NSError *error;
-  MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInImage:mppImage
-                                                                          error:&error];
+  MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectImage:mppImage
+                                                                        error:&error];
   XCTAssertNil(error);
   XCTAssertNotNil(faceLandmarkerResult);
 

@@ -208,10 +208,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   return image;
 }
 
-- (MPPHandLandmarkerResult *)detectInImageWithFileInfo:(ResourceFileInfo *)imageFileInfo
-                                   usingHandLandmarker:(MPPHandLandmarker *)handLandmarker {
+- (MPPHandLandmarkerResult *)detectImageWithFileInfo:(ResourceFileInfo *)imageFileInfo
+                                 usingHandLandmarker:(MPPHandLandmarker *)handLandmarker {
   MPPImage *mppImage = [self imageWithFileInfo:imageFileInfo];
-  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInImage:mppImage error:nil];
+  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectImage:mppImage error:nil];
   XCTAssertNotNil(handLandmarkerResult);
 
   return handLandmarkerResult;
@@ -221,8 +221,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
                              usingHandLandmarker:(MPPHandLandmarker *)handLandmarker
          approximatelyEqualsHandLandmarkerResult:
              (MPPHandLandmarkerResult *)expectedHandLandmarkerResult {
-  MPPHandLandmarkerResult *handLandmarkerResult = [self detectInImageWithFileInfo:fileInfo
-                                                              usingHandLandmarker:handLandmarker];
+  MPPHandLandmarkerResult *handLandmarkerResult = [self detectImageWithFileInfo:fileInfo
+                                                            usingHandLandmarker:handLandmarker];
   [self assertHandLandmarkerResult:handLandmarkerResult
       isApproximatelyEqualToExpectedResult:expectedHandLandmarkerResult];
 }
@@ -249,8 +249,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPHandLandmarker *handLandmarker =
       [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
 
-  MPPHandLandmarkerResult *handLandmarkerResult = [self detectInImageWithFileInfo:kNoHandsImage
-                                                              usingHandLandmarker:handLandmarker];
+  MPPHandLandmarkerResult *handLandmarkerResult = [self detectImageWithFileInfo:kNoHandsImage
+                                                            usingHandLandmarker:handLandmarker];
   AssertHandLandmarkerResultIsEmpty(handLandmarkerResult);
 }
 
@@ -264,8 +264,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPHandLandmarker *handLandmarker =
       [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
 
-  MPPHandLandmarkerResult *handLandmarkerResult = [self detectInImageWithFileInfo:kTwoHandsImage
-                                                              usingHandLandmarker:handLandmarker];
+  MPPHandLandmarkerResult *handLandmarkerResult = [self detectImageWithFileInfo:kTwoHandsImage
+                                                            usingHandLandmarker:handLandmarker];
 
   XCTAssertTrue(handLandmarkerResult.handedness.count == numHands);
 }
@@ -280,7 +280,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage
                                    orientation:UIImageOrientationRight];
 
-  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInImage:mppImage error:nil];
+  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectImage:mppImage error:nil];
 
   [self assertHandLandmarkerResult:handLandmarkerResult
       isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests
@@ -339,9 +339,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
 
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([handLandmarker detectAsyncInImage:image
-                            timestampInMilliseconds:0
-                                              error:&liveStreamApiCallError]);
+  XCTAssertFalse([handLandmarker detectAsyncImage:image
+                          timestampInMilliseconds:0
+                                            error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -354,9 +354,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([handLandmarker detectInVideoFrame:image
-                            timestampInMilliseconds:0
-                                              error:&videoApiCallError]);
+  XCTAssertFalse([handLandmarker detectVideoFrame:image
+                          timestampInMilliseconds:0
+                                            error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -378,9 +378,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
 
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([handLandmarker detectAsyncInImage:image
-                            timestampInMilliseconds:0
-                                              error:&liveStreamApiCallError]);
+  XCTAssertFalse([handLandmarker detectAsyncImage:image
+                          timestampInMilliseconds:0
+                                            error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -393,7 +393,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *imageApiCallError;
-  XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([handLandmarker detectImage:image error:&imageApiCallError]);
 
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -416,7 +416,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
 
   NSError *imageApiCallError;
-  XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([handLandmarker detectImage:image error:&imageApiCallError]);
 
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -428,9 +428,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([handLandmarker detectInVideoFrame:image
-                            timestampInMilliseconds:0
-                                              error:&videoApiCallError]);
+  XCTAssertFalse([handLandmarker detectVideoFrame:image
+                          timestampInMilliseconds:0
+                                            error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -452,9 +452,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
 
   for (int i = 0; i < 3; i++) {
-    MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInVideoFrame:image
-                                                               timestampInMilliseconds:i
-                                                                                 error:nil];
+    MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectVideoFrame:image
+                                                             timestampInMilliseconds:i
+                                                                               error:nil];
     [self assertHandLandmarkerResult:handLandmarkerResult
         isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests thumbUpHandLandmarkerResult]];
   }
@@ -480,10 +480,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
 
   MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
 
-  XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+  XCTAssertTrue([handLandmarker detectAsyncImage:image timestampInMilliseconds:1 error:nil]);
 
   NSError *error;
-  XCTAssertFalse([handLandmarker detectAsyncInImage:image timestampInMilliseconds:0 error:&error]);
+  XCTAssertFalse([handLandmarker detectAsyncImage:image timestampInMilliseconds:0 error:&error]);
 
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -533,7 +533,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
 
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([handLandmarker detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 
   NSTimeInterval timeout = 0.5f;

@@ -28,10 +28,10 @@ static const float scoreDifferenceTolerance = 0.02f;
 static NSString *const kLiveStreamTestsDictObjectDetectorKey = @"object_detector";
 static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
 
-#define AssertEqualErrors(error, expectedError)                                               \
-  XCTAssertNotNil(error);                                                                     \
-  XCTAssertEqualObjects(error.domain, expectedError.domain);                                  \
-  XCTAssertEqual(error.code, expectedError.code);                                             \
+#define AssertEqualErrors(error, expectedError)              \
+  XCTAssertNotNil(error);                                    \
+  XCTAssertEqualObjects(error.domain, expectedError.domain); \
+  XCTAssertEqual(error.code, expectedError.code);            \
   XCTAssertEqualObjects(error.localizedDescription, expectedError.localizedDescription)
 
 #define AssertEqualCategories(category, expectedCategory, detectionIndex, categoryIndex)           \
@@ -194,7 +194,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
                  usingObjectDetector:(MPPObjectDetector *)objectDetector
                           maxResults:(NSInteger)maxResults
           equalsObjectDetectorResult:(MPPObjectDetectorResult *)expectedObjectDetectorResult {
-  MPPObjectDetectorResult *ObjectDetectorResult = [objectDetector detectInImage:mppImage error:nil];
+  MPPObjectDetectorResult *ObjectDetectorResult = [objectDetector detectImage:mppImage error:nil];
 
   [self assertObjectDetectorResult:ObjectDetectorResult
            isEqualToExpectedResult:expectedObjectDetectorResult
@@ -495,9 +495,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([objectDetector detectAsyncInImage:image
-                            timestampInMilliseconds:0
-                                              error:&liveStreamApiCallError]);
+  XCTAssertFalse([objectDetector detectAsyncImage:image
+                          timestampInMilliseconds:0
+                                            error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -510,9 +510,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([objectDetector detectInVideoFrame:image
-                            timestampInMilliseconds:0
-                                              error:&videoApiCallError]);
+  XCTAssertFalse([objectDetector detectVideoFrame:image
+                          timestampInMilliseconds:0
+                                            error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -533,9 +533,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
   NSError *liveStreamApiCallError;
-  XCTAssertFalse([objectDetector detectAsyncInImage:image
-                            timestampInMilliseconds:0
-                                              error:&liveStreamApiCallError]);
+  XCTAssertFalse([objectDetector detectAsyncImage:image
+                          timestampInMilliseconds:0
+                                            error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -548,7 +548,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *imageApiCallError;
-  XCTAssertFalse([objectDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([objectDetector detectImage:image error:&imageApiCallError]);
 
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -571,7 +571,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
   NSError *imageApiCallError;
-  XCTAssertFalse([objectDetector detectInImage:image error:&imageApiCallError]);
+  XCTAssertFalse([objectDetector detectImage:image error:&imageApiCallError]);
 
   NSError *expectedImageApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -583,9 +583,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([objectDetector detectInVideoFrame:image
-                            timestampInMilliseconds:0
-                                              error:&videoApiCallError]);
+  XCTAssertFalse([objectDetector detectVideoFrame:image
+                          timestampInMilliseconds:0
+                                            error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -610,9 +610,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
   for (int i = 0; i < 3; i++) {
-    MPPObjectDetectorResult *ObjectDetectorResult = [objectDetector detectInVideoFrame:image
-                                                               timestampInMilliseconds:i
-                                                                                 error:nil];
+    MPPObjectDetectorResult *ObjectDetectorResult = [objectDetector detectVideoFrame:image
+                                                             timestampInMilliseconds:i
+                                                                               error:nil];
 
     [self assertObjectDetectorResult:ObjectDetectorResult
              isEqualToExpectedResult:
@@ -643,10 +643,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
 
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
-  XCTAssertTrue([objectDetector detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+  XCTAssertTrue([objectDetector detectAsyncImage:image timestampInMilliseconds:1 error:nil]);
 
   NSError *error;
-  XCTAssertFalse([objectDetector detectAsyncInImage:image timestampInMilliseconds:0 error:&error]);
+  XCTAssertFalse([objectDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error]);
 
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -702,7 +702,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   MPPImage *image = [self imageWithFileInfo:kCatsAndDogsImage];
 
   for (int i = 0; i < iterationCount; i++) {
-    XCTAssertTrue([objectDetector detectAsyncInImage:image timestampInMilliseconds:i error:nil]);
+    XCTAssertTrue([objectDetector detectAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 
   NSTimeInterval timeout = 0.5f;

@@ -100,8 +100,8 @@ NS_SWIFT_NAME(FaceDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image
-                                            error:(NSError **)error NS_SWIFT_NAME(detect(image:));
+- (nullable MPPFaceDetectorResult *)detectImage:(MPPImage *)image
+                                          error:(NSError **)error NS_SWIFT_NAME(detect(image:));
 
 /**
  * Performs face detection on the provided video frame of type `MPImage` using the whole
@@ -127,9 +127,9 @@ NS_SWIFT_NAME(FaceDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error
+- (nullable MPPFaceDetectorResult *)detectVideoFrame:(MPPImage *)image
+                             timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                               error:(NSError **)error
     NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
 
 /**
@@ -165,7 +165,7 @@ NS_SWIFT_NAME(FaceDetector)
  *
  * @return `true` if the image was sent to the task successfully, otherwise `false`.
  */
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error
     NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
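For reference, a minimal sketch (not part of the diff) of calling the renamed MPPFaceDetector methods declared above. It assumes a faceDetector already created from MPPFaceDetectorOptions and an MPPImage named image; neither setup is shown here.

    NSError *error = nil;

    // Still image: -detectImage:error: (Swift name unchanged: detect(image:)).
    MPPFaceDetectorResult *result = [faceDetector detectImage:image error:&error];
    NSLog(@"Found %lu face(s)", (unsigned long)result.detections.count);

    // Video frame at a given timestamp: -detectVideoFrame:timestampInMilliseconds:error:.
    MPPFaceDetectorResult *frameResult = [faceDetector detectVideoFrame:image
                                                timestampInMilliseconds:0
                                                                  error:&error];

    // Live stream: -detectAsyncImage:timestampInMilliseconds:error: only reports whether the
    // frame was sent to the task; results are delivered asynchronously.
    BOOL sent = [faceDetector detectAsyncImage:image timestampInMilliseconds:0 error:&error];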
				
			
@@ -130,13 +130,13 @@ static NSString *const kTaskName = @"faceDetector";
   return [self initWithOptions:options error:error];
 }
 
-- (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
+- (nullable MPPFaceDetectorResult *)detectImage:(MPPImage *)image error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
 
   return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPFaceDetectorResult *)detectVideoFrame:(MPPImage *)image
                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                  error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap =
@@ -147,7 +147,7 @@ static NSString *const kTaskName = @"faceDetector";
   return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
   return [_visionTaskRunner processLiveStreamImage:image

@@ -71,8 +71,8 @@ NS_SWIFT_NAME(FaceLandmarker)
  * @return An `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an error
  * in initializing the face landmaker.
  */
-- (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image
-                                              error:(NSError **)error NS_SWIFT_NAME(detect(image:));
+- (nullable MPPFaceLandmarkerResult *)detectImage:(MPPImage *)image
+                                            error:(NSError **)error NS_SWIFT_NAME(detect(image:));
 
 /**
  * Performs face landmark detection on the provided video frame of type `MPImage` using the whole
@@ -95,9 +95,9 @@ NS_SWIFT_NAME(FaceLandmarker)
  * @return An `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an
  * error in initializing the face landmaker.
  */
-- (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
-                                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                   error:(NSError **)error
+- (nullable MPPFaceLandmarkerResult *)detectVideoFrame:(MPPImage *)image
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                                 error:(NSError **)error
     NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
 
 /**
@@ -132,7 +132,7 @@ NS_SWIFT_NAME(FaceLandmarker)
  *
  * @return `true` if the image was sent to the task successfully, otherwise `false`.
  */
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error
     NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
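A similar sketch for the renamed MPPFaceLandmarker methods; faceLandmarker and image are assumed to exist, and faceLandmarks is assumed to hold one landmark array per detected face (as exercised by the tests above).

    NSError *error = nil;

    // Still image: -detectImage:error: returns an MPPFaceLandmarkerResult, or nil on error.
    MPPFaceLandmarkerResult *result = [faceLandmarker detectImage:image error:&error];
    for (NSArray<MPPNormalizedLandmark *> *landmarks in result.faceLandmarks) {
      NSLog(@"Detected a face with %lu landmarks", (unsigned long)landmarks.count);
    }

    // Video frame: -detectVideoFrame:timestampInMilliseconds:error:.
    MPPFaceLandmarkerResult *frameResult = [faceLandmarker detectVideoFrame:image
                                                    timestampInMilliseconds:0
                                                                      error:&error];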
				
			
@@ -154,15 +154,15 @@ static NSString *const kTaskName = @"faceLandmarker";
   return [self initWithOptions:options error:error];
 }
 
-- (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
+- (nullable MPPFaceLandmarkerResult *)detectImage:(MPPImage *)image error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
 
   return [MPPFaceLandmarker faceLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
-                                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                   error:(NSError **)error {
+- (nullable MPPFaceLandmarkerResult *)detectVideoFrame:(MPPImage *)image
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                                 error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap =
       [_visionTaskRunner processVideoFrame:image
                    timestampInMilliseconds:timestampInMilliseconds
@@ -171,7 +171,7 @@ static NSString *const kTaskName = @"faceLandmarker";
   return [MPPFaceLandmarker faceLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
   return [_visionTaskRunner processLiveStreamImage:image

@@ -146,8 +146,8 @@ NS_SWIFT_NAME(HandLandmarker)
  * @return  An `HandLandmarkerResult` object that contains the hand hand landmarks detection
  * results.
  */
-- (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image
-                                              error:(NSError **)error NS_SWIFT_NAME(detect(image:));
+- (nullable MPPHandLandmarkerResult *)detectImage:(MPPImage *)image
+                                            error:(NSError **)error NS_SWIFT_NAME(detect(image:));
 
 /**
  * Performs hand landmarks detection on the provided video frame of type `MPImage` using the whole
@@ -176,9 +176,9 @@ NS_SWIFT_NAME(HandLandmarker)
  * @return  An `HandLandmarkerResult` object that contains the hand hand landmarks detection
  * results.
  */
-- (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
-                                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                   error:(NSError **)error
+- (nullable MPPHandLandmarkerResult *)detectVideoFrame:(MPPImage *)image
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                                 error:(NSError **)error
     NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
 
 /**
@@ -216,7 +216,7 @@ NS_SWIFT_NAME(HandLandmarker)
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error
     NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
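The hand landmarker follows the same pattern; its live-stream entry point is now -detectAsyncImage:timestampInMilliseconds:error:. A hedged sketch, assuming a handLandmarker configured for live-stream mode and an MPPImage per camera frame:

    NSError *error = nil;
    // Timestamps must increase monotonically; sending an older timestamp fails, as the updated
    // tests above demonstrate (timestamp 1 followed by timestamp 0).
    NSInteger frameTimestampInMilliseconds = 0;  // e.g. derived from the frame's presentation time
    BOOL sent = [handLandmarker detectAsyncImage:image
                         timestampInMilliseconds:frameTimestampInMilliseconds
                                           error:&error];
    if (!sent) {
      NSLog(@"Could not send frame to the hand landmarker: %@", error.localizedDescription);
    }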
				
			
@@ -140,13 +140,13 @@ static NSString *const kTaskName = @"handLandmarker";
   return [self initWithOptions:options error:error];
 }
 
-- (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
+- (nullable MPPHandLandmarkerResult *)detectImage:(MPPImage *)image error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
 
   return [MPPHandLandmarker handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPHandLandmarkerResult *)detectVideoFrame:(MPPImage *)image
                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                    error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap =
@@ -157,7 +157,7 @@ static NSString *const kTaskName = @"handLandmarker";
   return [MPPHandLandmarker handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
   return [_visionTaskRunner processLiveStreamImage:image

@@ -112,8 +112,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPObjectDetectorResult *)detectInImage:(MPPImage *)image
-                                              error:(NSError **)error NS_SWIFT_NAME(detect(image:));
+- (nullable MPPObjectDetectorResult *)detectImage:(MPPImage *)image
+                                            error:(NSError **)error NS_SWIFT_NAME(detect(image:));
 
 /**
  * Performs object detection on the provided video frame of type `MPImage` using the whole
@@ -138,9 +138,9 @@ NS_SWIFT_NAME(ObjectDetector)
  * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
  * image data.
  */
-- (nullable MPPObjectDetectorResult *)detectInVideoFrame:(MPPImage *)image
-                                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                   error:(NSError **)error
+- (nullable MPPObjectDetectorResult *)detectVideoFrame:(MPPImage *)image
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                                 error:(NSError **)error
     NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
 
 /**
@@ -176,7 +176,7 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @return `true` if the image was sent to the task successfully, otherwise `false`.
  */
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error
     NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
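Finally, the object detector's video entry point is now -detectVideoFrame:timestampInMilliseconds:error:, called once per frame with that frame's timestamp, mirroring the updated tests above. A hedged sketch; objectDetector and image are assumed to exist, and the result is assumed to expose a detections array like the face detector's.

    NSError *error = nil;
    for (NSInteger i = 0; i < 3; i++) {
      MPPObjectDetectorResult *result = [objectDetector detectVideoFrame:image
                                                 timestampInMilliseconds:i
                                                                   error:&error];
      NSLog(@"Frame %ld: %lu detections", (long)i, (unsigned long)result.detections.count);
    }

    // Still image: -detectImage:error:.
    MPPObjectDetectorResult *imageResult = [objectDetector detectImage:image error:&error];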
				
			
@@ -128,13 +128,13 @@ static NSString *const kTaskName = @"objectDetector";
   return [self initWithOptions:options error:error];
 }
 
-- (nullable MPPObjectDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
+- (nullable MPPObjectDetectorResult *)detectImage:(MPPImage *)image error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
 
   return [MPPObjectDetector objectDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (nullable MPPObjectDetectorResult *)detectInVideoFrame:(MPPImage *)image
+- (nullable MPPObjectDetectorResult *)detectVideoFrame:(MPPImage *)image
                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                    error:(NSError **)error {
   std::optional<PacketMap> outputPacketMap =
@@ -145,7 +145,7 @@ static NSString *const kTaskName = @"objectDetector";
   return [MPPObjectDetector objectDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
-- (BOOL)detectAsyncInImage:(MPPImage *)image
+- (BOOL)detectAsyncImage:(MPPImage *)image
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
   return [_visionTaskRunner processLiveStreamImage:image