From 900e637b6ae0ece8848dfe5bebe05b8ad0ca1cf9 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 8 Sep 2023 19:15:32 +0530
Subject: [PATCH] Fixed typos in iOS documentation

---
 .../components/containers/sources/MPPCategory.h    |  6 +++---
 .../vision/face_detector/sources/MPPFaceDetector.h |  5 ++---
 .../face_landmarker/sources/MPPFaceLandmarker.h    | 13 +++++--------
 .../image_classifier/sources/MPPImageClassifier.h  | 14 +++++++-------
 .../sources/MPPImageClassifierOptions.h            |  3 +--
 .../object_detector/sources/MPPObjectDetector.h    |  5 ++---
 .../sources/MPPObjectDetectorOptions.h             |  3 +--
 7 files changed, 21 insertions(+), 28 deletions(-)

diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h b/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h
index 5753c4d3f..61c9e832f 100644
--- a/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h
@@ -44,15 +44,15 @@ NS_SWIFT_NAME(ResultCategory)
 @property(nonatomic, readonly, nullable) NSString *displayName;
 
 /**
- * Initializes a new `Category` with the given index, score, category name and display name.
+ * Initializes a new `ResultCategory` with the given index, score, category name and display name.
  *
  * @param index The index of the label in the corresponding label file.
  * @param score The probability score of this label category.
  * @param categoryName The label of this category object.
  * @param displayName The display name of the label.
  *
- * @return An instance of `Category` initialized with the given index, score, category name and
- * display name.
+ * @return An instance of `ResultCategory` initialized with the given index, score, category name
+ * and display name.
  */
 - (instancetype)initWithIndex:(NSInteger)index
                         score:(float)score
diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h
index 3dec361a6..8adb40679 100644
--- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h
+++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h
@@ -80,10 +80,9 @@ NS_SWIFT_NAME(FaceDetector)
                         error:(NSError **)error NS_DESIGNATED_INITIALIZER;
 
 /**
- * Performs face detection on the provided MPPImage using the whole image as region of
+ * Performs face detection on the provided `MPImage` using the whole image as region of
  * interest. Rotation will be applied according to the `orientation` property of the provided
- * `MPImage`. Only use this method when the `MPPFaceDetector` is created with running mode
- * `.image`.
+ * `MPImage`. Only use this method when the `FaceDetector` is created with running mode `.image`.
  *
  * This method supports classification of RGBA images. If your `MPImage` has a source type of
  * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
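For context, here is a minimal Swift sketch of the image-mode `FaceDetector` call path the corrected docs describe; the model filename and input image are placeholder assumptions, not part of this patch:

    import MediaPipeTasksVision
    import UIKit

    // Detect faces in a single still image (running mode `.image`).
    // "face_detector.task" is a placeholder path to a bundled model.
    func detectFaces(in uiImage: UIImage) throws -> FaceDetectorResult {
      let options = FaceDetectorOptions()
      options.baseOptions.modelAssetPath = "face_detector.task"
      options.runningMode = .image

      let faceDetector = try FaceDetector(options: options)
      // Rotation is taken from the image's `orientation`, as the docs note.
      let image = try MPImage(uiImage: uiImage)
      return try faceDetector.detect(image: image)
    }

Since no region of interest is passed, the returned `FaceDetectorResult` covers the whole image, matching the documented behavior.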
diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h
index 6c5c37512..9a22fe30c 100644
--- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h
+++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h
@@ -43,8 +43,7 @@ NS_SWIFT_NAME(FaceLandmarker)
 /**
  * Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`.
  *
- * @param options The options of type `FaceLandmarkerOptions` to use for configuring the
- * `MPPFaceLandmarker`.
+ * @param options The options of type `FaceLandmarkerOptions` to use for configuring the `FaceLandmarker`.
  *
  * @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error
  * in initializing the face landmaker.
@@ -53,11 +52,11 @@ NS_SWIFT_NAME(FaceLandmarker)
                         error:(NSError **)error NS_DESIGNATED_INITIALIZER;
 
 /**
- * Performs face landmark detection on the provided MPPImage using the whole image as region of
+ * Performs face landmark detection on the provided `MPImage` using the whole image as region of
  * interest. Rotation will be applied according to the `orientation` property of the provided
  * `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`.
  *
- * This method supports RGBA images. If your `MPPImage` has a source type of `.pixelBuffer` or
+ * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
  * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format
  * types:
  *  1. kCVPixelFormatType_32BGRA
@@ -68,8 +67,7 @@ NS_SWIFT_NAME(FaceLandmarker)
  *
  * @param image The `MPImage` on which face landmark detection is to be performed.
  *
- * @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an
- * error in initializing the face landmaker.
+ * @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an error in initializing the face landmarker.
  */
 - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image
                                               error:(NSError **)error NS_SWIFT_NAME(detect(image:));
@@ -77,8 +75,7 @@ NS_SWIFT_NAME(FaceLandmarker)
 /**
  * Performs face landmark detection on the provided video frame of type `MPImage` using the whole
  * image as region of interest. Rotation will be applied according to the `orientation` property of
- * the provided `MPImage`. Only use this method when the `MPPFaceLandmarker` is created with
- * running mode `.video`.
+ * the provided `MPImage`. Only use this method when the `FaceLandmarker` is created with running mode `.video`.
  *
  * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
  * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types:
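The renamed `FaceLandmarker` surface follows the same pattern; a brief sketch under the same placeholder-model assumption, using the `detect(image:)` Swift name declared via NS_SWIFT_NAME above:

    import MediaPipeTasksVision
    import UIKit

    // Run face landmark detection with running mode `.image`.
    // "face_landmarker.task" is a placeholder model path.
    func landmarkFaces(in uiImage: UIImage) throws -> FaceLandmarkerResult {
      let options = FaceLandmarkerOptions()
      options.baseOptions.modelAssetPath = "face_landmarker.task"
      options.runningMode = .image

      let landmarker = try FaceLandmarker(options: options)
      return try landmarker.detect(image: MPImage(uiImage: uiImage))
    }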
*/ @@ -101,7 +101,7 @@ NS_SWIFT_NAME(ImageClassifier) /** * Performs image classification on the provided `MPImage` cropped to the specified region of * interest. Rotation will be applied on the cropped image according to the `orientation` property - * of the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with + * of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with * running mode, `.image`. * * This method supports classification of RGBA images. If your `MPImage` has a source type of @@ -127,7 +127,7 @@ NS_SWIFT_NAME(ImageClassifier) /** * Performs image classification on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with + * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with * running mode `.video`. * * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must @@ -142,7 +142,7 @@ NS_SWIFT_NAME(ImageClassifier) * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha * channel. * - * @param image The `MPPImage` on which image classification is to be performed. + * @param image The `MPImage` on which image classification is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. * @@ -188,8 +188,8 @@ NS_SWIFT_NAME(ImageClassifier) /** * Sends live stream image data of type `MPImage` to perform image classification using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with - * `MPPRunningModeLiveStream`. + * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with running + * mode `.liveStream`. * * The object which needs to be continuously notified of the available results of image * classification must confirm to `ImageClassifierLiveStreamDelegate` protocol and implement the diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h index 72f8859b5..bfd136883 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h @@ -24,8 +24,7 @@ NS_ASSUME_NONNULL_BEGIN /** * This protocol defines an interface for the delegates of `ImageClassifier` object to receive - * results of asynchronous classification of images (i.e, when `runningMode = - * .liveStream`). + * results of asynchronous classification of images (i.e, when `runningMode` = `.liveStream`). * * The delegate of `ImageClassifier` must adopt `ImageClassifierLiveStreamDelegate` protocol. * The methods in this protocol are optional. 
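Because the `ImageClassifierOptions` hunk above concerns the live-stream delegate, a hedged sketch of how a client might adopt it follows; the Swift delegate-method name is inferred from the protocol's naming, and the model path is a placeholder, so treat both as assumptions:

    import MediaPipeTasksVision

    // Receives asynchronous classification results when runningMode == .liveStream.
    final class ClassifierStreamHandler: NSObject, ImageClassifierLiveStreamDelegate {
      func imageClassifier(_ imageClassifier: ImageClassifier,
                           didFinishClassification result: ImageClassifierResult?,
                           timestampInMilliseconds: Int,
                           error: Error?) {
        guard let result = result else { return }
        print("Result at \(timestampInMilliseconds) ms:",
              result.classificationResult.classifications)
      }
    }

    // "classifier.task" is a placeholder model path.
    func makeLiveStreamClassifier(delegate: ClassifierStreamHandler) throws -> ImageClassifier {
      let options = ImageClassifierOptions()
      options.baseOptions.modelAssetPath = "classifier.task"
      options.runningMode = .liveStream
      options.imageClassifierLiveStreamDelegate = delegate
      return try ImageClassifier(options: options)
    }

Frames would then be fed with the async classify call, keeping timestamps monotonically increasing as the docs require; the delegate property is weak, so the handler must be retained by the caller.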
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
index 851e8a355..82721f47b 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
@@ -118,8 +118,7 @@ NS_SWIFT_NAME(ObjectDetector)
 /**
  * Performs object detection on the provided video frame of type `MPImage` using the whole
  * image as region of interest. Rotation will be applied according to the `orientation` property of
- * the provided `MPImage`. Only use this method when the `MPPObjectDetector` is created with
- * `.video`.
+ * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with `.video`.
  *
 * This method supports detecting objects in of RGBA images. If your `MPImage` has a source type of
 * .pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
@@ -170,7 +169,7 @@ NS_SWIFT_NAME(ObjectDetector)
  * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
  * `videoSettings` property.
  *
- * @param image A live stream image data of type `MPPImage` on which object detection is to be
+ * @param image A live stream image data of type `MPImage` on which object detection is to be
  * performed.
  * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
  * image is sent to the object detector. The input timestamps must be monotonically increasing.
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h
index 0060d3749..d31805a36 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h
@@ -79,8 +79,7 @@ NS_SWIFT_NAME(ObjectDetectorOptions)
  * An object that confirms to `ObjectDetectorLiveStreamDelegate` protocol. This object must
  * implement `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to
  * receive the results of performing asynchronous object detection on images (i.e, when
- * `runningMode` =
- * `.liveStream`).
+ * `runningMode` = `.liveStream`).
  */
 @property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
     objectDetectorLiveStreamDelegate;
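A matching sketch for the `ObjectDetectorOptions` delegate documented in the final hunk, with the Swift method name inferred from the selector the docs quote; the exact signature and model path are assumptions:

    import MediaPipeTasksVision

    // Receives asynchronous detection results when runningMode == .liveStream.
    final class DetectorStreamHandler: NSObject, ObjectDetectorLiveStreamDelegate {
      func objectDetector(_ objectDetector: ObjectDetector,
                          didFinishDetection result: ObjectDetectorResult?,
                          timestampInMilliseconds: Int,
                          error: Error?) {
        guard let result = result else { return }
        print("Detections at \(timestampInMilliseconds) ms: \(result.detections.count)")
      }
    }

    // "object_detector.task" is a placeholder model path.
    func makeLiveStreamDetector(delegate: DetectorStreamHandler) throws -> ObjectDetector {
      let options = ObjectDetectorOptions()
      options.baseOptions.modelAssetPath = "object_detector.task"
      options.runningMode = .liveStream
      options.objectDetectorLiveStreamDelegate = delegate
      return try ObjectDetector(options: options)
    }

As with the classifier, `objectDetectorLiveStreamDelegate` is a weak property, so the handler object must be kept alive for as long as results are expected.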