Updated iOS Image Segmenter documentation to use Swift names

Prianka Liz Kariat 2023-10-09 17:48:39 +05:30
parent 69fe645c43
commit 6c4b4469ae
4 changed files with 43 additions and 46 deletions


@@ -33,15 +33,15 @@ typedef NS_ENUM(NSUInteger, MPPMaskDataType) {
* Masks are stored as `UInt8 *` or `float *` objects.
* Every mask has an underlying type which can be accessed using `dataType`. You can access the
* mask as any other type using the appropriate properties. For example, if the underlying type is
* `uInt8`, in addition to accessing the mask using `uint8Data`, you can access `float32Data` to get
* the 32-bit float data (with values ranging from 0.0 to 1.0). The first time you access the data
* as a type different from the underlying type, an expensive type conversion is performed.
* Subsequent accesses return a pointer to the memory location of the same type-converted array. As
* type conversions can be expensive, it is recommended to limit accesses to data of types
* different from the underlying type.
*
* Masks returned by MediaPipe tasks are owned by the underlying C++ task. If you need to extend
* the lifetime of these objects, you can invoke the `copy()` method.
*/
NS_SWIFT_NAME(Mask)
@interface MPPMask : NSObject <NSCopying>
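The lazy conversion described above is easiest to see from Swift. Below is a minimal sketch that reads a mask both as its underlying type and as the converted type; the Swift spellings (`Mask`, the `uInt8` data type case, `uint8Data`, `float32Data`) follow this header, while the code that produced the mask is assumed and not shown.

```swift
import MediaPipeTasksVision

// Sketch only: `mask` is assumed to come from an ImageSegmenter result.
func inspect(_ mask: Mask) {
    let pixelCount = Int(mask.width) * Int(mask.height)

    if mask.dataType == .uInt8 {
        // Same-type access: no conversion, just a pointer into the existing buffer.
        let labels = UnsafeBufferPointer(start: mask.uint8Data, count: pixelCount)
        print("distinct labels:", Set(labels).count)
    }

    // Cross-type access: the first call converts the whole mask to 32-bit floats in
    // [0.0, 1.0] (expensive); later calls return the cached converted buffer.
    let confidences = UnsafeBufferPointer(start: mask.float32Data, count: pixelCount)
    print("first pixel as float:", confidences.first ?? 0)
}
```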
@@ -68,19 +68,18 @@ NS_SWIFT_NAME(Mask)
@property(nonatomic, readonly, assign) const float *float32Data;
/**
* Initializes a `Mask` object of type `uInt8` with the given `UInt8*` data, width and height.
*
* If `shouldCopy` is set to `true`, the newly created `Mask` stores a reference to a deep-copied
* `uint8Data`. Since deep copies are expensive, it is recommended not to set `shouldCopy` unless
* the `Mask` must outlive the passed-in `uint8Data`.
*
* @param uint8Data A pointer to the memory location of the `UInt8` data array.
* @param width The width of the mask.
* @param height The height of the mask.
* @param shouldCopy A flag that indicates whether the newly created `Mask` should make a deep copy
* of `uint8Data`.
*
* @return A new `Mask` instance with the given `UInt8*` data, width and height.
*/
- (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data
                                     width:(NSInteger)width
@@ -88,18 +87,17 @@ NS_SWIFT_NAME(Mask)
                                shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER;
/**
* Initializes a `Mask` object of type `float32` with the given `float*` data, width and height.
*
* If `shouldCopy` is set to `true`, the newly created `Mask` stores a reference to a deep-copied
* `float32Data`. Since deep copies are expensive, it is recommended not to set `shouldCopy` unless
* the `Mask` must outlive the passed-in `float32Data`.
*
* @param float32Data A pointer to the memory location of the `float` data array.
* @param width The width of the mask.
* @param height The height of the mask.
*
* @return A new `Mask` instance with the given `float*` data, width and height.
*/
- (nullable instancetype)initWithFloat32Data:(const float *)float32Data
                                       width:(NSInteger)width
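For completeness, here is a hedged sketch of calling these initializers from Swift. The argument labels (`uInt8Data:`, `float32Data:`, `width:`, `height:`, `shouldCopy:`) are assumed from the Objective-C selectors above; the buffer contents are made-up sample data.

```swift
import MediaPipeTasksVision

let width = 4, height = 4

// Category labels, one UInt8 per pixel (sample data).
var labels = [UInt8](repeating: 0, count: width * height)
labels[5] = 1

// shouldCopy: true makes the mask deep-copy the buffer, so the Mask can safely
// outlive `labels`. Only skip the copy when the buffer outlives the Mask.
let categoryMask = labels.withUnsafeBufferPointer { buffer in
    Mask(uInt8Data: buffer.baseAddress!, width: width, height: height, shouldCopy: true)
}

// Confidence values, one 32-bit float per pixel (sample data).
let confidences = [Float](repeating: 0.5, count: width * height)
let confidenceMask = confidences.withUnsafeBufferPointer { buffer in
    Mask(float32Data: buffer.baseAddress!, width: width, height: height, shouldCopy: true)
}
```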


@@ -30,7 +30,6 @@
                        width:(NSInteger)width
                       height:(NSInteger)height
                   shouldCopy:(BOOL)shouldCopy {
  self = [super init];
  if (self) {
    _width = width;


@@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPImageSegmenter;
/**
* This protocol defines an interface for the delegates of an `ImageSegmenter` object to receive
* results of performing asynchronous segmentation on images (i.e., when `runningMode` =
* `liveStream`).
*
* The delegate of `ImageSegmenter` must adopt the `ImageSegmenterLiveStreamDelegate` protocol.
* The methods in this protocol are optional.
*/
NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
@@ -37,14 +37,14 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
/**
* This method notifies a delegate that the results of asynchronous segmentation of
* an image submitted to the `ImageSegmenter` are available.
*
* This method is called on a private serial dispatch queue created by the `ImageSegmenter`
* for performing the asynchronous delegate calls.
*
* @param imageSegmenter The image segmenter which performed the segmentation. This is useful to
* test equality when there are multiple instances of `ImageSegmenter`.
* @param result The `ImageSegmenterResult` object that contains a list of category or confidence
* masks and optional quality scores.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the image segmenter.
@@ -58,26 +58,26 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
NS_SWIFT_NAME(imageSegmenter(_:didFinishSegmentation:timestampInMilliseconds:error:));
@end
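As an illustration, a delegate conforming to this protocol might look like the sketch below. The protocol name follows the doc comment above and the method signature follows the `NS_SWIFT_NAME` shown; the exact parameter types are assumptions.

```swift
import Foundation
import MediaPipeTasksVision

// Sketch of a live-stream delegate; parameter types are assumed.
final class SegmentationResultHandler: NSObject, ImageSegmenterLiveStreamDelegate {

    func imageSegmenter(_ imageSegmenter: ImageSegmenter,
                        didFinishSegmentation result: ImageSegmenterResult?,
                        timestampInMilliseconds: Int,
                        error: Error?) {
        if let error = error {
            print("Segmentation failed at \(timestampInMilliseconds) ms: \(error)")
            return
        }
        // This is invoked on the segmenter's private serial queue; hop to the
        // main queue before touching UI state.
        DispatchQueue.main.async {
            if let mask = result?.categoryMask {
                print("Received a \(mask.width)x\(mask.height) category mask")
            }
        }
    }
}
```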
/** Options for setting up an `ImageSegmenter`. */
NS_SWIFT_NAME(ImageSegmenterOptions)
@interface MPPImageSegmenterOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the image segmenter task. Defaults to `image`.
* `ImageSegmenter` can be created with one of the following running modes:
* 1. `image`: The mode for performing segmentation on single image inputs.
* 2. `video`: The mode for performing segmentation on the decoded frames of a
* video.
* 3. `liveStream`: The mode for performing segmentation on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* An object that conforms to the `ImageSegmenterLiveStreamDelegate` protocol. This object must
* implement `imageSegmenter(_:didFinishSegmentation:timestampInMilliseconds:error:)` to receive
* the results of performing asynchronous segmentation on images (i.e., when `runningMode` =
* `liveStream`).
*/
@property(nonatomic, weak, nullable) id<MPPImageSegmenterLiveStreamDelegate>
    imageSegmenterLiveStreamDelegate;
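Putting the options together for live-stream use might look like the sketch below. Only `runningMode` and `imageSegmenterLiveStreamDelegate` appear in this diff; `baseOptions`, the `ImageSegmenter(options:)` initializer, and the model file name are assumptions drawn from the wider MediaPipe Tasks API.

```swift
import MediaPipeTasksVision

// Sketch of live-stream configuration (model path and segmenter initializer are assumed).
func makeLiveStreamSegmenter(delegate: ImageSegmenterLiveStreamDelegate) throws -> ImageSegmenter {
    let options = ImageSegmenterOptions()
    options.baseOptions.modelAssetPath = "selfie_segmenter.tflite"  // hypothetical model file
    options.runningMode = .liveStream

    // The delegate property is weak, so the caller must keep `delegate` alive
    // for as long as results are expected.
    options.imageSegmenterLiveStreamDelegate = delegate

    return try ImageSegmenter(options: options)
}
```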


@@ -18,22 +18,22 @@
NS_ASSUME_NONNULL_BEGIN
/** Represents the segmentation results generated by `ImageSegmenter`. */
NS_SWIFT_NAME(ImageSegmenterResult)
@interface MPPImageSegmenterResult : MPPTaskResult
/**
* An optional array of `Mask` objects. Each `Mask` in the array holds a 32-bit float array of size
* `image width` * `image height` which represents the confidence mask for each category. Each
* element of the float array represents the confidence with which the model predicted that the
* corresponding pixel belongs to the category that the mask represents, usually in the range [0,1].
*/
@property(nonatomic, readonly, nullable) NSArray<MPPMask *> *confidenceMasks;
/**
* An optional `Mask` that holds a `UInt8` array of size `image width` * `image height`. Each
* element of this array represents the class to which the pixel in the original image was
* predicted to belong.
*/
@property(nonatomic, readonly, nullable) MPPMask *categoryMask;
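A short Swift sketch of reading these two properties, assuming `result` came from an earlier segmentation call and that the masks in `confidenceMasks` are ordered by category index:

```swift
import MediaPipeTasksVision

// Sketch: `result` is assumed to come from an ImageSegmenter call.
func summarize(_ result: ImageSegmenterResult) {
    if let confidenceMasks = result.confidenceMasks {
        for (category, mask) in confidenceMasks.enumerated() {
            let pixelCount = Int(mask.width) * Int(mask.height)
            let values = UnsafeBufferPointer(start: mask.float32Data, count: pixelCount)
            let mean = values.reduce(0, +) / Float(max(pixelCount, 1))
            print("category \(category): mean confidence \(mean)")
        }
    }

    if let categoryMask = result.categoryMask {
        // One UInt8 class label per pixel, as documented above.
        let pixelCount = Int(categoryMask.width) * Int(categoryMask.height)
        let labels = UnsafeBufferPointer(start: categoryMask.uint8Data, count: pixelCount)
        print("distinct categories:", Set(labels).count)
    }
}
```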
@@ -45,17 +45,17 @@ NS_SWIFT_NAME(ImageSegmenterResult)
@property(nonatomic, readonly, nullable) NSArray<NSNumber *> *qualityScores;
/**
* Initializes a new `ImageSegmenterResult` with the given array of confidence masks, category mask,
* quality scores and timestamp (in milliseconds).
*
* @param confidenceMasks An optional array of `Mask` objects. Each `Mask` in the array must
* be of type `float32`.
* @param categoryMask An optional `Mask` object of type `uInt8`.
* @param qualityScores The quality scores of the result masks of type `NSArray<NSNumber *> *`. Each
* `NSNumber` in the array holds a `float`.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
*
* @return An instance of `ImageSegmenterResult` initialized with the given array of confidence
* masks, category mask, quality scores and timestamp (in milliseconds).
*/
- (instancetype)initWithConfidenceMasks:(nullable NSArray<MPPMask *> *)confidenceMasks
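This initializer is primarily called by the framework itself, but it can also be handy for building stub results in tests. A hedged sketch, with the Swift argument labels assumed from the parameters documented above:

```swift
import MediaPipeTasksVision

// Sketch: an empty stub result, e.g. for unit tests. The Swift labels are
// assumed from the documented Objective-C parameters.
let emptyResult = ImageSegmenterResult(
    confidenceMasks: nil,
    categoryMask: nil,
    qualityScores: nil,
    timestampInMilliseconds: 0)
```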