Fixed typos in iOS documentation

This commit is contained in:
Prianka Liz Kariat 2023-09-08 19:15:32 +05:30
parent 886a118232
commit 900e637b6a
7 changed files with 21 additions and 28 deletions

View File

@ -44,15 +44,15 @@ NS_SWIFT_NAME(ResultCategory)
@property(nonatomic, readonly, nullable) NSString *displayName; @property(nonatomic, readonly, nullable) NSString *displayName;
/** /**
* Initializes a new `Category` with the given index, score, category name and display name. * Initializes a new `ResultCategory` with the given index, score, category name and display name.
* *
* @param index The index of the label in the corresponding label file. * @param index The index of the label in the corresponding label file.
* @param score The probability score of this label category. * @param score The probability score of this label category.
* @param categoryName The label of this category object. * @param categoryName The label of this category object.
* @param displayName The display name of the label. * @param displayName The display name of the label.
* *
* @return An instance of `Category` initialized with the given index, score, category name and * @return An instance of `ResultCategory` initialized with the given index, score, category name
* display name. * and display name.
*/ */
- (instancetype)initWithIndex:(NSInteger)index - (instancetype)initWithIndex:(NSInteger)index
score:(float)score score:(float)score

View File

@ -80,10 +80,9 @@ NS_SWIFT_NAME(FaceDetector)
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/** /**
* Performs face detection on the provided MPPImage using the whole image as region of * Performs face detection on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPImage`. Only use this method when the `MPPFaceDetector` is created with running mode * `MPImage`. Only use this method when the `FaceDetector` is created with running mode `.image`.
* `.image`.
* *
* This method supports detection of RGBA images. If your `MPImage` has a source type of * This method supports detection of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the

View File

@ -43,8 +43,7 @@ NS_SWIFT_NAME(FaceLandmarker)
/** /**
* Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`. * Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`.
* *
* @param options The options of type `FaceLandmarkerOptions` to use for configuring the * @param options The options of type `FaceLandmarkerOptions` to use for configuring the `FaceLandmarker`.
* `MPPFaceLandmarker`.
* *
* @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error * @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error
* in initializing the face landmarker. * in initializing the face landmarker.
@ -53,11 +52,11 @@ NS_SWIFT_NAME(FaceLandmarker)
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/** /**
* Performs face landmark detection on the provided MPPImage using the whole image as region of * Performs face landmark detection on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`. * `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`.
* *
* This method supports RGBA images. If your `MPPImage` has a source type of `.pixelBuffer` or * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format
* types: * types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
@ -68,8 +67,7 @@ NS_SWIFT_NAME(FaceLandmarker)
* *
* @param image The `MPImage` on which face landmark detection is to be performed. * @param image The `MPImage` on which face landmark detection is to be performed.
* *
* @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an * @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an error in initializing the face landmarker.
* error in initializing the face landmarker. */
*/ */
- (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image
error:(NSError **)error NS_SWIFT_NAME(detect(image:)); error:(NSError **)error NS_SWIFT_NAME(detect(image:));
@ -77,8 +75,7 @@ NS_SWIFT_NAME(FaceLandmarker)
/** /**
* Performs face landmark detection on the provided video frame of type `MPImage` using the whole * Performs face landmark detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPImage`. Only use this method when the `MPPFaceLandmarker` is created with * the provided `MPImage`. Only use this method when the `FaceLandmarker` is created with running mode `.video`.
* running mode `.video`.
* *
* This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types: * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types:

View File

@ -76,7 +76,7 @@ NS_SWIFT_NAME(ImageClassifier)
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/** /**
* Performs image classification on the provided MPPImage using the whole image as region of * Performs image classification on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPImage`. Only use this method when the `ImageClassifier` is created with running mode, * `MPImage`. Only use this method when the `ImageClassifier` is created with running mode,
* `.image`. * `.image`.
@ -90,7 +90,7 @@ NS_SWIFT_NAME(ImageClassifier)
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel. * channel.
* *
* @param image The `MPPImage` on which image classification is to be performed. * @param image The `MPImage` on which image classification is to be performed.
* *
* @return An `ImageClassifierResult` object that contains a list of image classifications. * @return An `ImageClassifierResult` object that contains a list of image classifications.
*/ */
@ -101,7 +101,7 @@ NS_SWIFT_NAME(ImageClassifier)
/** /**
* Performs image classification on the provided `MPImage` cropped to the specified region of * Performs image classification on the provided `MPImage` cropped to the specified region of
* interest. Rotation will be applied on the cropped image according to the `orientation` property * interest. Rotation will be applied on the cropped image according to the `orientation` property
* of the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with * of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* running mode, `.image`. * running mode, `.image`.
* *
* This method supports classification of RGBA images. If your `MPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
@ -127,7 +127,7 @@ NS_SWIFT_NAME(ImageClassifier)
/** /**
* Performs image classification on the provided video frame of type `MPImage` using the whole * Performs image classification on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* running mode `.video`. * running mode `.video`.
* *
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
@ -142,7 +142,7 @@ NS_SWIFT_NAME(ImageClassifier)
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel. * channel.
* *
* @param image The `MPPImage` on which image classification is to be performed. * @param image The `MPImage` on which image classification is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing. * timestamps must be monotonically increasing.
* *
@ -188,8 +188,8 @@ NS_SWIFT_NAME(ImageClassifier)
/** /**
* Sends live stream image data of type `MPImage` to perform image classification using the whole * Sends live stream image data of type `MPImage` to perform image classification using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPImage`. Only use this method when the `ImageClassifier` is created with * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with running
* `MPPRunningModeLiveStream`. * mode `.liveStream`.
* *
* The object which needs to be continuously notified of the available results of image * The object which needs to be continuously notified of the available results of image
* classification must conform to `ImageClassifierLiveStreamDelegate` protocol and implement the * classification must conform to `ImageClassifierLiveStreamDelegate` protocol and implement the

View File

@ -24,8 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
/** /**
* This protocol defines an interface for the delegates of `ImageClassifier` object to receive * This protocol defines an interface for the delegates of `ImageClassifier` object to receive
* results of asynchronous classification of images (i.e., when `runningMode = * results of asynchronous classification of images (i.e., when `runningMode` = `.liveStream`).
* .liveStream`).
* *
* The delegate of `ImageClassifier` must adopt `ImageClassifierLiveStreamDelegate` protocol. * The delegate of `ImageClassifier` must adopt `ImageClassifierLiveStreamDelegate` protocol.
* The methods in this protocol are optional. * The methods in this protocol are optional.

View File

@ -118,8 +118,7 @@ NS_SWIFT_NAME(ObjectDetector)
/** /**
* Performs object detection on the provided video frame of type `MPImage` using the whole * Performs object detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPImage`. Only use this method when the `MPPObjectDetector` is created with * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with `.video`.
* `.video`.
* *
* This method supports detecting objects in RGBA images. If your `MPImage` has a source type of * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
@ -170,7 +169,7 @@ NS_SWIFT_NAME(ObjectDetector)
* that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property. * `videoSettings` property.
* *
* @param image A live stream image data of type `MPPImage` on which object detection is to be * @param image A live stream image data of type `MPImage` on which object detection is to be
* performed. * performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the object detector. The input timestamps must be monotonically increasing. * image is sent to the object detector. The input timestamps must be monotonically increasing.

View File

@ -79,8 +79,7 @@ NS_SWIFT_NAME(ObjectDetectorOptions)
* An object that conforms to `ObjectDetectorLiveStreamDelegate` protocol. This object must * An object that conforms to `ObjectDetectorLiveStreamDelegate` protocol. This object must
* implement `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to * implement `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to
* receive the results of performing asynchronous object detection on images (i.e., when * receive the results of performing asynchronous object detection on images (i.e., when
* `runningMode` = * `runningMode` = `.liveStream`).
* `.liveStream`).
*/ */
@property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate> @property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
objectDetectorLiveStreamDelegate; objectDetectorLiveStreamDelegate;