diff --git a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm index 1976bf603..548c4bdbf 100644 --- a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm +++ b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm @@ -25,7 +25,7 @@ static NSDictionary *const kPortraitImage = @{@"name" : @"portrait", @"type" : @"jpg", @"orientation" : @(UIImageOrientationUp)}; static NSDictionary *const kPortraitRotatedImage = - @{@"name" : @"portrait_rotated", @"type" : @"jpg", @"orientation" : @(UIImageOrientationRight)}; + @{@"name" : @"portrait_rotated", @"type" : @"jpg", @"orientation" : @(UIImageOrientationLeft)}; static NSDictionary *const kCatImage = @{@"name" : @"cat", @"type" : @"jpg"}; static NSString *const kShortRangeBlazeFaceModel = @"face_detection_short_range"; static NSArray *const kPortraitExpectedKeypoints = @[ diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 8fbcb6b49..4b4eceed6 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -343,7 +343,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; MPPGestureRecognizer *gestureRecognizer = [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; MPPGestureRecognizerResult *gestureRecognizerResult = [gestureRecognizer recognizeImage:mppImage error:nil]; diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index c08976923..e1bd9f6c3 100644 
--- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -402,7 +402,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; ]; MPPImage *image = [self imageWithFileInfo:kBurgerRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; [self assertResultsOfClassifyImage:image usingImageClassifier:imageClassifier @@ -425,7 +425,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; displayName:nil] ]; MPPImage *image = [self imageWithFileInfo:kMultiObjectsRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; // roi around folding chair MPPImageClassifierResult *imageClassifierResult = diff --git a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m index 2ef5a0957..079682df1 100644 --- a/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m +++ b/mediapipe/tasks/ios/test/vision/object_detector/MPPObjectDetectorTests.m @@ -438,7 +438,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [[MPPObjectDetectorResult alloc] initWithDetections:detections timestampInMilliseconds:0]; MPPImage *image = [self imageWithFileInfo:kCatsAndDogsRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; [self assertResultsOfDetectInImage:image usingObjectDetector:objectDetector diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h index deffc97e2..847efc331 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h @@ -62,10 +62,10 @@ NS_SWIFT_NAME(MPImage) /** * Initializes an `MPPImage` object with the given `UIImage`. 
- * The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. - * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be - * performed on the it without any rotation. To create an `MPPImage` with a different orientation, - * please use `[MPPImage initWithImage:orientation:error:]`. + * The orientation of the newly created `MPPImage` will be equal to the `imageOrientation` of + * `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly. + * To create an `MPPImage` with an orientation different from its `imageOrientation`, please use + * `[MPPImage initWithUIImage:orientation:error:]`. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param error An optional error parameter populated when there is an error in initializing the * @@ -77,14 +77,19 @@ NS_SWIFT_NAME(MPImage) - (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error; /** - * Initializes an `MPPImage` object with the given `UIImabe` and orientation. + * Initializes an `MPPImage` object with the given `UIImage` and orientation. The given orientation + * will be used to calculate the rotation to be applied to the `UIImage` before inference is + * performed on it by the vision tasks. The `imageOrientation` stored in the `UIImage` is ignored + * when `MPImage` objects created by this method are sent to the vision tasks for inference. Use + * `[MPPImage initWithUIImage:error:]` to initialize images with the `imageOrientation` of + * `UIImage`. * * If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference * will be performed on a copy of the image rotated according to the orientation. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param orientation The display orientation of the image. This will be stored in the property - * `orientation`. `MPPImage`.
+ * `orientation` of `MPPImage` and will override the `imageOrientation` of the passed-in `UIImage`. * @param error An optional error parameter populated when there is an error in initializing the * `MPPImage`. * diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm index cba8a63ff..ae5e1d64c 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm @@ -30,13 +30,13 @@ using ::mediapipe::tasks::core::PacketsCallback; } // namespace /** Rotation degrees for a 90 degree rotation to the right. */ -static const NSInteger kMPPOrientationDegreesRight = -90; +static const NSInteger kMPPOrientationDegreesRight = -270; /** Rotation degrees for a 180 degree rotation. */ static const NSInteger kMPPOrientationDegreesDown = -180; /** Rotation degrees for a 90 degree rotation to the left. */ -static const NSInteger kMPPOrientationDegreesLeft = -270; +static const NSInteger kMPPOrientationDegreesLeft = -90; static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision";