Merge pull request #4721 from priankakariatyml:ios-doc-updates

PiperOrigin-RevId: 560133185
This commit is contained in:
Copybara-Service 2023-08-25 10:37:51 -07:00
commit 5d2d8f9ab2
18 changed files with 344 additions and 386 deletions

View File

@ -44,14 +44,14 @@ NS_SWIFT_NAME(ResultCategory)
@property(nonatomic, readonly, nullable) NSString *displayName; @property(nonatomic, readonly, nullable) NSString *displayName;
/** /**
* Initializes a new `MPPCategory` with the given index, score, category name and display name. * Initializes a new `Category` with the given index, score, category name and display name.
* *
* @param index The index of the label in the corresponding label file. * @param index The index of the label in the corresponding label file.
* @param score The probability score of this label category. * @param score The probability score of this label category.
* @param categoryName The label of this category object. * @param categoryName The label of this category object.
* @param displayName The display name of the label. * @param displayName The display name of the label.
* *
* @return An instance of `MPPCategory` initialized with the given index, score, category name and * @return An instance of `Category` initialized with the given index, score, category name and
* display name. * display name.
*/ */
- (instancetype)initWithIndex:(NSInteger)index - (instancetype)initWithIndex:(NSInteger)index

View File

@ -32,32 +32,32 @@ NS_SWIFT_NAME(Classifications)
/** The optional name of the classifier head, which is the corresponding tensor metadata name. */ /** The optional name of the classifier head, which is the corresponding tensor metadata name. */
@property(nonatomic, readonly, nullable) NSString *headName; @property(nonatomic, readonly, nullable) NSString *headName;
/** An array of `MPPCategory` objects containing the predicted categories. */ /** An array of `Category` objects containing the predicted categories. */
@property(nonatomic, readonly) NSArray<MPPCategory *> *categories; @property(nonatomic, readonly) NSArray<MPPCategory *> *categories;
/** /**
* Initializes a new `MPPClassifications` object with the given head index and array of categories. * Initializes a new `Classifications` object with the given head index and array of categories.
* Head name is initialized to `nil`. * Head name is initialized to `nil`.
* *
* @param headIndex The index of the classifier head. * @param headIndex The index of the classifier head.
* @param categories An array of `MPPCategory` objects containing the predicted categories. * @param categories An array of `Category` objects containing the predicted categories.
* *
* @return An instance of `MPPClassifications` initialized with the given head index and * @return An instance of `Classifications` initialized with the given head index and
* array of categories. * array of categories.
*/ */
- (instancetype)initWithHeadIndex:(NSInteger)headIndex - (instancetype)initWithHeadIndex:(NSInteger)headIndex
categories:(NSArray<MPPCategory *> *)categories; categories:(NSArray<MPPCategory *> *)categories;
/** /**
* Initializes a new `MPPClassifications` with the given head index, head name and array of * Initializes a new `Classifications` with the given head index, head name and array of
* categories. * categories.
* *
* @param headIndex The index of the classifier head. * @param headIndex The index of the classifier head.
* @param headName The name of the classifier head, which is the corresponding tensor metadata * @param headName The name of the classifier head, which is the corresponding tensor metadata
* name. * name.
* @param categories An array of `MPPCategory` objects containing the predicted categories. * @param categories An array of `Category` objects containing the predicted categories.
* *
* @return An object of `MPPClassifications` initialized with the given head index, head name and * @return An object of `Classifications` initialized with the given head index, head name and
* array of categories. * array of categories.
*/ */
- (instancetype)initWithHeadIndex:(NSInteger)headIndex - (instancetype)initWithHeadIndex:(NSInteger)headIndex
@ -78,7 +78,7 @@ NS_SWIFT_NAME(ClassificationResult)
@interface MPPClassificationResult : NSObject @interface MPPClassificationResult : NSObject
/** /**
* An Array of `MPPClassifications` objects containing the predicted categories for each head of * An Array of `Classifications` objects containing the predicted categories for each head of
* the model. * the model.
*/ */
@property(nonatomic, readonly) NSArray<MPPClassifications *> *classifications; @property(nonatomic, readonly) NSArray<MPPClassifications *> *classifications;
@ -93,15 +93,15 @@ NS_SWIFT_NAME(ClassificationResult)
@property(nonatomic, readonly) NSInteger timestampInMilliseconds; @property(nonatomic, readonly) NSInteger timestampInMilliseconds;
/** /**
* Initializes a new `MPPClassificationResult` with the given array of classifications and time * Initializes a new `ClassificationResult` with the given array of classifications and time
* stamp (in milliseconds). * stamp (in milliseconds).
* *
* @param classifications An Array of `MPPClassifications` objects containing the predicted * @param classifications An Array of `Classifications` objects containing the predicted
* categories for each head of the model. * categories for each head of the model.
* @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data * @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data
* corresponding to these results. * corresponding to these results.
* *
* @return An instance of `MPPClassificationResult` initialized with the given array of * @return An instance of `ClassificationResult` initialized with the given array of
* classifications and timestamp (in milliseconds). * classifications and timestamp (in milliseconds).
*/ */
- (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications

View File

@ -35,7 +35,7 @@ NS_SWIFT_NAME(NormalizedKeypoint)
@property(nonatomic, readonly) float score; @property(nonatomic, readonly) float score;
/** /**
* Initializes a new `MPPNormalizedKeypoint` object with the given location, label and score. * Initializes a new `NormalizedKeypoint` object with the given location, label and score.
* You must pass 0.0 for `score` if it is not present. * You must pass 0.0 for `score` if it is not present.
* *
* @param location The (x,y) coordinates location of the normalized keypoint. * @param location The (x,y) coordinates location of the normalized keypoint.
@ -43,7 +43,7 @@ NS_SWIFT_NAME(NormalizedKeypoint)
* @param score The optional score of the normalized keypoint. You must pass 0.0 for score if it * @param score The optional score of the normalized keypoint. You must pass 0.0 for score if it
* is not present. * is not present.
* *
* @return An instance of `MPPNormalizedKeypoint` initialized with the given location, label * @return An instance of `NormalizedKeypoint` initialized with the given location, label
* and score. * and score.
*/ */
- (instancetype)initWithLocation:(CGPoint)location - (instancetype)initWithLocation:(CGPoint)location
@ -56,18 +56,18 @@ NS_SWIFT_NAME(NormalizedKeypoint)
@end @end
/** Represents one detected object in the results of `MPPObjectDetector`. */ /** Represents one detected object in the results of `ObjectDetector`. */
NS_SWIFT_NAME(Detection) NS_SWIFT_NAME(Detection)
@interface MPPDetection : NSObject @interface MPPDetection : NSObject
/** An array of `MPPCategory` objects containing the predicted categories. */ /** An array of `Category` objects containing the predicted categories. */
@property(nonatomic, readonly) NSArray<MPPCategory *> *categories; @property(nonatomic, readonly) NSArray<MPPCategory *> *categories;
/** The bounding box of the detected object. */ /** The bounding box of the detected object. */
@property(nonatomic, readonly) CGRect boundingBox; @property(nonatomic, readonly) CGRect boundingBox;
/** /**
* An optional array of `MPPNormalizedKeypoint` objects associated with the detection. Keypoints * An optional array of `NormalizedKeypoint` objects associated with the detection. Keypoints
* represent interesting points related to the detection. For example, the keypoints represent the * represent interesting points related to the detection. For example, the keypoints represent the
* eyes, ear and mouth from the face detection model. In template matching detection, e.g. KNIFT, * eyes, ear and mouth from the face detection model. In template matching detection, e.g. KNIFT,
* they can instead represent the feature points for template matching. * they can instead represent the feature points for template matching.
@ -75,18 +75,18 @@ NS_SWIFT_NAME(Detection)
@property(nonatomic, readonly, nullable) NSArray<MPPNormalizedKeypoint *> *keypoints; @property(nonatomic, readonly, nullable) NSArray<MPPNormalizedKeypoint *> *keypoints;
/** /**
* Initializes a new `MPPDetection` object with the given array of categories, bounding box and * Initializes a new `Detection` object with the given array of categories, bounding box and
* optional array of keypoints. * optional array of keypoints.
* *
* @param categories A list of `MPPCategory` objects that contain category name, display name, * @param categories A list of `Category` objects that contain category name, display name,
* score, and the label index. * score, and the label index.
* @param boundingBox A `CGRect` that represents the bounding box. * @param boundingBox A `CGRect` that represents the bounding box.
* @param keypoints An optional array of `MPPNormalizedKeypoint` objects associated with the * @param keypoints An optional array of `NormalizedKeypoint` objects associated with the
* detection. Keypoints represent interesting points related to the detection. For example, the * detection. Keypoints represent interesting points related to the detection. For example, the
* keypoints represent the eyes, ear and mouth from the face detection model. In template matching * keypoints represent the eyes, ear and mouth from the face detection model. In template matching
* detection, e.g. KNIFT, they can instead represent the feature points for template matching. * detection, e.g. KNIFT, they can instead represent the feature points for template matching.
* *
* @return An instance of `MPPDetection` initialized with the given array of categories, bounding * @return An instance of `Detection` initialized with the given array of categories, bounding
* box and `nil` keypoints. * box and `nil` keypoints.
*/ */
- (instancetype)initWithCategories:(NSArray<MPPCategory *> *)categories - (instancetype)initWithCategories:(NSArray<MPPCategory *> *)categories

View File

@ -49,13 +49,13 @@ NS_SWIFT_NAME(Landmark)
@property(nonatomic, readonly, nullable) NSNumber *presence; @property(nonatomic, readonly, nullable) NSNumber *presence;
/** /**
* Initializes a new `MPPLandmark` object with the given x, y and z coordinates. * Initializes a new `Landmark` object with the given x, y and z coordinates.
* *
* @param x The x coordinates of the landmark. * @param x The x coordinates of the landmark.
* @param y The y coordinates of the landmark. * @param y The y coordinates of the landmark.
* @param z The z coordinates of the landmark. * @param z The z coordinates of the landmark.
* *
* @return An instance of `MPPLandmark` initialized with the given x, y and z coordinates. * @return An instance of `Landmark` initialized with the given x, y and z coordinates.
*/ */
- (instancetype)initWithX:(float)x - (instancetype)initWithX:(float)x
y:(float)y y:(float)y
@ -103,13 +103,13 @@ NS_SWIFT_NAME(NormalizedLandmark)
@property(nonatomic, readonly, nullable) NSNumber *presence; @property(nonatomic, readonly, nullable) NSNumber *presence;
/** /**
* Initializes a new `MPPNormalizedLandmark` object with the given x, y and z coordinates. * Initializes a new `NormalizedLandmark` object with the given x, y and z coordinates.
* *
* @param x The x coordinates of the landmark. * @param x The x coordinates of the landmark.
* @param y The y coordinates of the landmark. * @param y The y coordinates of the landmark.
* @param z The z coordinates of the landmark. * @param z The z coordinates of the landmark.
* *
* @return An instance of `MPPNormalizedLandmark` initialized with the given x, y and z coordinates. * @return An instance of `NormalizedLandmark` initialized with the given x, y and z coordinates.
*/ */
- (instancetype)initWithX:(float)x - (instancetype)initWithX:(float)x
y:(float)y y:(float)y

View File

@ -40,10 +40,10 @@ NS_SWIFT_NAME(MPImage)
@property(nonatomic, readonly) CGFloat height; @property(nonatomic, readonly) CGFloat height;
/** /**
* The display orientation of the image. If `imageSourceType` is `MPPImageSourceTypeImage`, the * The display orientation of the image. If `imageSourceType` is `.image`, the
* default value is `image.imageOrientation`; otherwise the default value is * default value is `image.imageOrientation`; otherwise the default value is
* `UIImageOrientationUp`. If the `MPPImage` is being used as input for any MediaPipe vision tasks * `UIImage.Orientation.up`. If the `MPImage` is being used as input for any MediaPipe vision tasks
* and is set to any orientation other than `UIImageOrientationUp`, inference will be performed on * and is set to any orientation other than `UIImage.Orientation.up`, inference will be performed on
* a rotated copy of the image according to the orientation. * a rotated copy of the image according to the orientation.
*/ */
@property(nonatomic, readonly) UIImageOrientation orientation; @property(nonatomic, readonly) UIImageOrientation orientation;
@ -54,46 +54,48 @@ NS_SWIFT_NAME(MPImage)
/** The source image. `nil` if `imageSourceType` is not `.image`. */ /** The source image. `nil` if `imageSourceType` is not `.image`. */
@property(nonatomic, readonly, nullable) UIImage *image; @property(nonatomic, readonly, nullable) UIImage *image;
/** The source pixel buffer. `nil` if `imageSourceType` is not `.pixelBuffer`. */ /** The source pixel buffer. `nil` if ``imageSourceType`` is not `.pixelBuffer`. */
@property(nonatomic, readonly, nullable) CVPixelBufferRef pixelBuffer; @property(nonatomic, readonly, nullable) CVPixelBufferRef pixelBuffer;
/** The source sample buffer. `nil` if `imageSourceType` is not `.sampleBuffer`. */ /** The source sample buffer. `nil` if ``imageSourceType`` is not `.sampleBuffer`. */
@property(nonatomic, readonly, nullable) CMSampleBufferRef sampleBuffer; @property(nonatomic, readonly, nullable) CMSampleBufferRef sampleBuffer;
/** /**
* Initializes an `MPPImage` object with the given `UIImage`. * Initializes an `MPImage` object with the given `UIImage`.
* The orientation of the newly created `MPPImage` will be equal to the `imageOrientation` of *
* The orientation of the newly created `MPImage` will be equal to the `imageOrientation` of
* `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly. * `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly.
* To create an `MPPImage` with an orientation different from its `imageOrientation`, please use * To create an `MPImage` with an orientation different from its `imageOrientation`, please use
* `[MPPImage initWithImage:orientation:error:]`. * `MPImage(uiImage:orientation:)`.
* *
* @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param image The image to use as the source. Its `CGImage` property must not be `NULL`.
* @param error An optional error parameter populated when there is an error in initializing the * @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`. * `MPImage`.
* *
* @return A new `MPPImage` instance with the given image as the source. `nil` if the given * @return A new `MPImage` instance with the given image as the source. `nil` if the given
* `image` is `nil` or invalid. * `image` is `nil` or invalid.
*/ */
- (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error; - (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error;
/** /**
* Initializes an `MPPImage` object with the given `UIImage` and orientation. The given orientation * Initializes an `MPImage` object with the given `UIImage` and orientation.
* will be used to calculate the rotation to be applied to the `UIImage` before inference is *
* performed on it by the vision tasks. The `imageOrientation` stored in the `UIImage` is ignored * The given orientation will be used to calculate the rotation to be applied to the `UIImage`
* when `MPImage` objects created by this method are sent to the vision tasks for inference. Use * before inference is performed on it by the vision tasks. The `imageOrientation` stored in the
* `[MPPImage initWithImage:orientation:error:]` to initialize images with the `imageOrientation` of * `UIImage` is ignored when `MPImage` objects created by this method are sent to the vision tasks
* for inference. Use `MPImage(uiImage:)` to initialize images with the `imageOrientation` of
* `UIImage`. * `UIImage`.
* *
* If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference * If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference
* will be performed on a copy of the image rotated according to the orientation. * will be performed on a copy of the image rotated according to the orientation.
* *
* @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param image The image to use as the source. Its `CGImage` property must not be `NULL`.
* @param orientation The display orientation of the image. This will be stored in the property * @param orientation The display orientation of the image. This will be stored in the property
* `orientation` `MPPImage` and will override the `imageOrientation` of the passed in `UIImage`. * `orientation` `MPImage` and will override the `imageOrientation` of the passed in `UIImage`.
* @param error An optional error parameter populated when there is an error in initializing the * @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`. * `MPImage`.
* *
* @return A new `MPPImage` instance with the given image as the source. `nil` if the given * @return A new `MPImage` instance with the given image as the source. `nil` if the given
* `image` is `nil` or invalid. * `image` is `nil` or invalid.
*/ */
- (nullable instancetype)initWithUIImage:(UIImage *)image - (nullable instancetype)initWithUIImage:(UIImage *)image
@ -101,36 +103,36 @@ NS_SWIFT_NAME(MPImage)
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/** /**
* Initializes an `MPPImage` object with the given pixel buffer. * Initializes an `MPImage` object with the given pixel buffer.
* *
* The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. * The orientation of the newly created `MPImage` will be `UIImageOrientationUp`.
* Hence, if this image is used as input for any MediaPipe vision tasks, inference will be * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be
* performed on it without any rotation. To create an `MPPImage` with a different * performed on it without any rotation. To create an `MPImage` with a different
* orientation, please use `[MPPImage initWithPixelBuffer:orientation:error:]`. * orientation, please use `MPImage(pixelBuffer:orientation:)`.
* *
* @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new * @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle. * `MPImage` instance for the duration of its lifecycle.
* @param error An optional error parameter populated when there is an error in initializing the * @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`. * `MPImage`.
* *
* @return A new `MPPImage` instance with the given pixel buffer as the source. `nil` if the * @return A new `MPImage` instance with the given pixel buffer as the source. `nil` if the
* given pixel buffer is `nil` or invalid. * given pixel buffer is `nil` or invalid.
*/ */
- (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError **)error; - (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError **)error;
/** /**
* Initializes an `MPPImage` object with the given pixel buffer and orientation. * Initializes an `MPImage` object with the given pixel buffer and orientation.
* *
* If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference * If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference
* will be performed on a copy of the image rotated according to the orientation. * will be performed on a copy of the image rotated according to the orientation.
* *
* @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new * @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle. * `MPImage` instance for the duration of its lifecycle.
* @param orientation The display orientation of the image. * @param orientation The display orientation of the image.
* @param error An optional error parameter populated when there is an error in initializing the * @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`. * `MPImage`.
* *
* @return A new `MPPImage` instance with the given orientation and pixel buffer as the source. * @return A new `MPImage` instance with the given orientation and pixel buffer as the source.
* `nil` if the given pixel buffer is `nil` or invalid. * `nil` if the given pixel buffer is `nil` or invalid.
*/ */
- (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
@ -138,35 +140,35 @@ NS_SWIFT_NAME(MPImage)
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/** /**
* Initializes an `MPPImage` object with the given sample buffer. * Initializes an `MPImage` object with the given sample buffer.
* *
* The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. * The orientation of the newly created `MPImage` will be `UIImageOrientationUp`.
* Hence, if this image is used as input for any MediaPipe vision tasks, inference will be * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be
* performed on it without any rotation. To create an `MPPImage` with a different orientation, * performed on it without any rotation. To create an `MPImage` with a different orientation,
* please use `[MPPImage initWithSampleBuffer:orientation:error:]`. * please use `MPImage(sampleBuffer:orientation:)`.
* *
* @param sampleBuffer The sample buffer to use as the source. It will be retained by the new * @param sampleBuffer The sample buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle. The sample buffer must be based on * `MPImage` instance for the duration of its lifecycle. The sample buffer must be based on
* a pixel buffer (not compressed data). In practice, it should be the video output of the * a pixel buffer (not compressed data). In practice, it should be the video output of the
* camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s. * camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s.
* @return A new `MPPImage` instance with the given sample buffer as the source. `nil` if the * @return A new `MPImage` instance with the given sample buffer as the source. `nil` if the
* given sample buffer is `nil` or invalid. * given sample buffer is `nil` or invalid.
*/ */
- (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer - (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
error:(NSError **)error; error:(NSError **)error;
/** /**
* Initializes an `MPPImage` object with the given sample buffer and orientation. * Initializes an `MPImage` object with the given sample buffer and orientation.
* *
* If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference * If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference
* will be performed on a copy of the image rotated according to the orientation. * will be performed on a copy of the image rotated according to the orientation.
* *
* @param sampleBuffer The sample buffer to use as the source. It will be retained by the new * @param sampleBuffer The sample buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle. The sample buffer must be based on * `MPImage` instance for the duration of its lifecycle. The sample buffer must be based on
* a pixel buffer (not compressed data). In practice, it should be the video output of the * a pixel buffer (not compressed data). In practice, it should be the video output of the
* camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s. * camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s.
* @param orientation The display orientation of the image. * @param orientation The display orientation of the image.
* @return A new `MPPImage` instance with the given orientation and sample buffer as the source. * @return A new `MPImage` instance with the given orientation and sample buffer as the source.
* `nil` if the given sample buffer is `nil` or invalid. * `nil` if the given sample buffer is `nil` or invalid.
*/ */
- (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer - (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer

View File

@ -57,27 +57,23 @@ NS_SWIFT_NAME(FaceDetector)
@interface MPPFaceDetector : NSObject @interface MPPFaceDetector : NSObject
/** /**
* Creates a new instance of `MPPFaceDetector` from an absolute path to a TensorFlow Lite model * Creates a new instance of `FaceDetector` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPFaceDetector`. * file stored locally on the device and the default `FaceDetector`.
* *
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* face detector.
* *
* @return A new instance of `MPPFaceDetector` with the given model path. `nil` if there is an * @return A new instance of `FaceDetector` with the given model path. `nil` if there is an
* error in initializing the face detector. * error in initializing the face detector.
*/ */
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/** /**
* Creates a new instance of `MPPFaceDetector` from the given `MPPFaceDetectorOptions`. * Creates a new instance of `FaceDetector` from the given `FaceDetectorOptions`.
* *
* @param options The options of type `MPPFaceDetectorOptions` to use for configuring the * @param options The options of type `FaceDetectorOptions` to use for configuring the
* `MPPFaceDetector`. * `FaceDetector`.
* @param error An optional error parameter populated when there is an error in initializing the
* face detector.
* *
* @return A new instance of `MPPFaceDetector` with the given options. `nil` if there is an error * @return A new instance of `FaceDetector` with the given options. `nil` if there is an error
* in initializing the face detector. * in initializing the face detector.
*/ */
- (nullable instancetype)initWithOptions:(MPPFaceDetectorOptions *)options - (nullable instancetype)initWithOptions:(MPPFaceDetectorOptions *)options
@ -86,23 +82,21 @@ NS_SWIFT_NAME(FaceDetector)
/** /**
* Performs face detection on the provided MPPImage using the whole image as region of * Performs face detection on the provided MPPImage using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPFaceDetector` is created with * `MPImage`. Only use this method when the `MPPFaceDetector` is created with running mode
* `MPPRunningModeImage`. * `.image`.
* *
* This method supports classification of RGBA images. If your `MPPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
* must have one of the following pixel format types: * following pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is
* RGB with an Alpha channel. * RGB with an Alpha channel.
* *
* @param image The `MPPImage` on which face detection is to be performed. * @param image The `MPImage` on which face detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing face
* detection on the input image.
* *
* @return An `MPPFaceDetectorResult` that contains a list of detections, each detection * @return A `FaceDetectorResult` that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates * has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data. * image data.
@ -111,27 +105,25 @@ NS_SWIFT_NAME(FaceDetector)
error:(NSError **)error NS_SWIFT_NAME(detect(image:)); error:(NSError **)error NS_SWIFT_NAME(detect(image:));
/** /**
* Performs face detection on the provided video frame of type `MPPImage` using the whole * Performs face detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPFaceDetector` is created with * the provided `MPImage`. Only use this method when the `FaceDetector` is created with running
* `MPPRunningModeVideo`. * mode `.video`.
* *
* This method supports RGBA images. If your `MPPImage` has a source type of * This method supports RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
* must have one of the following pixel format types: * following pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image The `MPPImage` on which face detection is to be performed. * @param image The `MPImage` on which face detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing. * timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing face
* detection on the input image.
* *
* @return An `MPPFaceDetectorResult` that contains a list of detections, each detection * @return A `FaceDetectorResult` that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates * has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data. * image data.
@ -142,39 +134,37 @@ NS_SWIFT_NAME(FaceDetector)
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
/** /**
* Sends live stream image data of type `MPPImage` to perform face detection using the whole * Sends live stream image data of type `MPImage` to perform face detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPFaceDetector` is created with * the provided `MPImage`. Only use this method when the `FaceDetector` is created with
* `MPPRunningModeLiveStream`. * `.liveStream`.
* *
* The object which needs to be continuously notified of the available results of face * The object which needs to be continuously notified of the available results of face
* detection must conform to `MPPFaceDetectorLiveStreamDelegate` protocol and implement the * detection must conform to `FaceDetectorLiveStreamDelegate` protocol and implement the
* `faceDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. * `faceDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the face detector. The input timestamps must be monotonically increasing. * to the face detector. The input timestamps must be monotonically increasing.
* *
* This method supports RGBA images. If your `MPPImage` has a source type of * This method supports RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
* must have one of the following pixel format types: * following pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color * If the input `MPImage` has a source type of `.image` ensure that the color
* space is RGB with an Alpha channel. * space is RGB with an Alpha channel.
* *
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property. * `videoSettings` property.
* *
* @param image A live stream image data of type `MPPImage` on which face detection is to be * @param image A live stream image data of type `MPImage` on which face detection is to be
* performed. * performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the face detector. The input timestamps must be monotonically increasing. * image is sent to the face detector. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing face
* detection on the input live stream image data.
* *
* @return `YES` if the image was sent to the task successfully, otherwise `NO`. * @return `true` if the image was sent to the task successfully, otherwise `false`.
*/ */
- (BOOL)detectAsyncInImage:(MPPImage *)image - (BOOL)detectAsyncInImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds

View File

@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPFaceDetector; @class MPPFaceDetector;
/** /**
* This protocol defines an interface for the delegates of `MPPFaceDetector` to receive * This protocol defines an interface for the delegates of `FaceDetector` to receive
* results of performing asynchronous face detection on images (i.e, when `runningMode` = * results of performing asynchronous face detection on images (i.e, when `runningMode` =
* `MPPRunningModeLiveStream`). * `.liveStream`).
* *
* The delegate of `MPPFaceDetector` must adopt `MPPFaceDetectorLiveStreamDelegate` protocol. * The delegate of `FaceDetector` must adopt `FaceDetectorLiveStreamDelegate` protocol.
* The methods in this protocol are optional. * The methods in this protocol are optional.
*/ */
NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate) NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate)
@ -37,14 +37,14 @@ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate)
/** /**
* This method notifies a delegate that the results of asynchronous face detection of * This method notifies a delegate that the results of asynchronous face detection of
* an image submitted to the `MPPFaceDetector` are available. * an image submitted to the `FaceDetector` are available.
* *
* This method is called on a private serial dispatch queue created by the `MPPFaceDetector` * This method is called on a private serial dispatch queue created by the `FaceDetector`
* for performing the asynchronous delegates calls. * for performing the asynchronous delegates calls.
* *
* @param faceDetector The face detector which performed the face detection. * @param faceDetector The face detector which performed the face detection.
* This is useful to test equality when there are multiple instances of `MPPFaceDetector`. * This is useful to test equality when there are multiple instances of `FaceDetector`.
* @param result The `MPPFaceDetectorResult` object that contains a list of detections, each * @param result The `FaceDetectorResult` object that contains a list of detections, each
* detection has a bounding box that is expressed in the unrotated input frame of reference * detection has a bounding box that is expressed in the unrotated input frame of reference
* coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
* underlying image data. * underlying image data.
@ -60,26 +60,26 @@ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate)
NS_SWIFT_NAME(faceDetector(_:didFinishDetection:timestampInMilliseconds:error:)); NS_SWIFT_NAME(faceDetector(_:didFinishDetection:timestampInMilliseconds:error:));
@end @end
/** Options for setting up a `MPPFaceDetector`. */ /** Options for setting up a `FaceDetector`. */
NS_SWIFT_NAME(FaceDetectorOptions) NS_SWIFT_NAME(FaceDetectorOptions)
@interface MPPFaceDetectorOptions : MPPTaskOptions <NSCopying> @interface MPPFaceDetectorOptions : MPPTaskOptions <NSCopying>
/** /**
* Running mode of the face detector task. Defaults to `MPPRunningModeImage`. * Running mode of the face detector task. Defaults to `.image`.
* `MPPFaceDetector` can be created with one of the following running modes: * `FaceDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs. * 1. `.image`: The mode for performing face detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a * 2. `.video`: The mode for performing face detection on the decoded frames of a
* video. * video.
* 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of * 3. `.liveStream`: The mode for performing face detection on a live stream of
* input data, such as from the camera. * input data, such as from the camera.
*/ */
@property(nonatomic) MPPRunningMode runningMode; @property(nonatomic) MPPRunningMode runningMode;
/** /**
* An object that conforms to `MPPFaceDetectorLiveStreamDelegate` protocol. This object must * An object that conforms to `FaceDetectorLiveStreamDelegate` protocol. This object must
* implement `faceDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive * implement `faceDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to
* the results of performing asynchronous face detection on images (i.e, when `runningMode` = * receive the results of performing asynchronous face detection on images (i.e, when `runningMode`
* `MPPRunningModeLiveStream`). * = `.liveStream`).
*/ */
@property(nonatomic, weak, nullable) id<MPPFaceDetectorLiveStreamDelegate> @property(nonatomic, weak, nullable) id<MPPFaceDetectorLiveStreamDelegate>
faceDetectorLiveStreamDelegate; faceDetectorLiveStreamDelegate;

View File

@ -18,27 +18,27 @@
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
/** Represents the detection results generated by `MPPFaceDetector`. */ /** Represents the detection results generated by `FaceDetector`. */
NS_SWIFT_NAME(FaceDetectorResult) NS_SWIFT_NAME(FaceDetectorResult)
@interface MPPFaceDetectorResult : MPPTaskResult @interface MPPFaceDetectorResult : MPPTaskResult
/** /**
* The array of `MPPDetection` objects each of which has a bounding box that is expressed in the * The array of `Detection` objects each of which has a bounding box that is expressed in the
* unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x * unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x
* [0,image_height)`, which are the dimensions of the underlying image data. * [0,image_height)`, which are the dimensions of the underlying image data.
*/ */
@property(nonatomic, readonly) NSArray<MPPDetection *> *detections; @property(nonatomic, readonly) NSArray<MPPDetection *> *detections;
/** /**
* Initializes a new `MPPFaceDetectorResult` with the given array of detections and timestamp (in * Initializes a new `FaceDetectorResult` with the given array of detections and timestamp (in
* milliseconds). * milliseconds).
* *
* @param detections An array of `MPPDetection` objects each of which has a bounding box that is * @param detections An array of `Detection` objects each of which has a bounding box that is
* expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width) * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
* x [0,image_height)`, which are the dimensions of the underlying image data. * x [0,image_height)`, which are the dimensions of the underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
* *
* @return An instance of `MPPFaceDetectorResult` initialized with the given array of detections * @return An instance of `FaceDetectorResult` initialized with the given array of detections
* and timestamp (in milliseconds). * and timestamp (in milliseconds).
*/ */
- (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections

View File

@ -30,27 +30,23 @@ NS_SWIFT_NAME(FaceLandmarker)
@interface MPPFaceLandmarker : NSObject @interface MPPFaceLandmarker : NSObject
/** /**
* Creates a new instance of `MPPFaceLandmarker` from an absolute path to a TensorFlow Lite model * Creates a new instance of `FaceLandmarker` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPFaceLandmarkerOptions`. * file stored locally on the device and the default `FaceLandmarkerOptions`.
* *
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* face landmarker.
* *
* @return A new instance of `MPPFaceLandmarker` with the given model path. `nil` if there is an * @return A new instance of `FaceLandmarker` with the given model path. `nil` if there is an
* error in initializing the face landmarker. * error in initializing the face landmarker.
*/ */
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/** /**
* Creates a new instance of `MPPFaceLandmarker` from the given `MPPFaceLandmarkerOptions`. * Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`.
* *
* @param options The options of type `MPPFaceLandmarkerOptions` to use for configuring the * @param options The options of type `FaceLandmarkerOptions` to use for configuring the
* `MPPFaceLandmarker`. * `FaceLandmarker`.
* @param error An optional error parameter populated when there is an error in initializing the
* face landmarker.
* *
* @return A new instance of `MPPFaceLandmarker` with the given options. `nil` if there is an error * @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error
* in initializing the face landmarker. * in initializing the face landmarker.
*/ */
- (nullable instancetype)initWithOptions:(MPPFaceLandmarkerOptions *)options - (nullable instancetype)initWithOptions:(MPPFaceLandmarkerOptions *)options
@ -59,49 +55,45 @@ NS_SWIFT_NAME(FaceLandmarker)
/** /**
* Performs face landmark detection on the provided `MPPImage` using the whole image as region of * Performs face landmark detection on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created with * `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`.
* `MPPRunningModeImage`.
* *
* This method supports RGBA images. If your `MPPImage` has a source type of * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format
* must have one of the following pixel format types: * types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* RGB with an Alpha channel. * Alpha channel.
* *
* @param image The `MPPImage` on which face landmark detection is to be performed. * @param image The `MPImage` on which face landmark detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing face
* landmark detection on the input image.
* *
* @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. * @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an
* error in performing face landmark detection.
*/ */
- (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image
error:(NSError **)error NS_SWIFT_NAME(detect(image:)); error:(NSError **)error NS_SWIFT_NAME(detect(image:));
/** /**
* Performs face landmark detection on the provided video frame of type `MPPImage` using the whole * Performs face landmark detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created with * the provided `MPImage`. Only use this method when the `FaceLandmarker` is created with
* `MPPRunningModeVideo`. * running mode `.video`.
* *
* This method supports RGBA images. If your `MPPImage` has a source type of * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types:
* must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image The `MPPImage` on which face landmark detection is to be performed. * @param image The `MPImage` on which face landmark detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing. * timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing face
* landmark detection on the input image.
* *
* @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. * @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an
* error in performing face landmark detection.
*/ */
- (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image - (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@ -109,39 +101,36 @@ NS_SWIFT_NAME(FaceLandmarker)
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
/** /**
* Sends live stream image data of type `MPPImage` to perform face landmark detection using the * Sends live stream image data of type `MPImage` to perform face landmark detection using the
* whole image as region of interest. Rotation will be applied according to the `orientation` * whole image as region of interest. Rotation will be applied according to the `orientation`
* property of the provided `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created * property of the provided `MPImage`. Only use this method when the `FaceLandmarker` is created
* with `MPPRunningModeLiveStream`. * with `.liveStream`.
* *
* The object which needs to be continuously notified of the available results of face * The object which needs to be continuously notified of the available results of face
* detection must conform to `MPPFaceLandmarkerLiveStreamDelegate` protocol and implement the * detection must conform to `FaceLandmarkerLiveStreamDelegate` protocol and implement the
* `faceLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. * `faceLandmarker(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the face detector. The input timestamps must be monotonically increasing. * to the face detector. The input timestamps must be monotonically increasing.
* *
* This method supports RGBA images. If your `MPPImage` has a source type of * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types:
* must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* space is RGB with an Alpha channel. * Alpha channel.
* *
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property. * `videoSettings` property.
* *
* @param image A live stream image data of type `MPPImage` on which face landmark detection is to * @param image A live stream image data of type `MPImage` on which face landmark detection is to be
* be performed. * performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the face detector. The input timestamps must be monotonically increasing. * image is sent to the face detector. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error when sending the input
* image to the graph.
* *
* @return `YES` if the image was sent to the task successfully, otherwise `NO`. * @return `true` if the image was sent to the task successfully, otherwise `false`.
*/ */
- (BOOL)detectAsyncInImage:(MPPImage *)image - (BOOL)detectAsyncInImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds

View File

@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPFaceLandmarker; @class MPPFaceLandmarker;
/** /**
* This protocol defines an interface for the delegates of `MPPFaceLandmarker` to receive * This protocol defines an interface for the delegates of `FaceLandmarker` to receive
* results of performing asynchronous face detection on images (i.e, when `runningMode` = * results of performing asynchronous face detection on images (i.e, when `runningMode` =
* `MPPRunningModeLiveStream`). * `.liveStream`).
* *
* The delegate of `MPPFaceLandmarker` must adopt `MPPFaceLandmarkerLiveStreamDelegate` protocol. * The delegate of `FaceLandmarker` must adopt `FaceLandmarkerLiveStreamDelegate` protocol.
* The methods in this protocol are optional. * The methods in this protocol are optional.
*/ */
NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate) NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate)
@ -35,14 +35,14 @@ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate)
/** /**
* This method notifies a delegate that the results of asynchronous face detection of * This method notifies a delegate that the results of asynchronous face detection of
* an image submitted to the `MPPFaceLandmarker` are available. * an image submitted to the `FaceLandmarker` are available.
* *
* This method is called on a private serial dispatch queue created by the `MPPFaceLandmarker` * This method is called on a private serial dispatch queue created by the `FaceLandmarker`
* for performing the asynchronous delegates calls. * for performing the asynchronous delegates calls.
* *
* @param faceLandmarker The face landmarker which performed the face landmark detections. * @param faceLandmarker The face landmarker which performed the face landmark detections.
* This is useful to test equality when there are multiple instances of `MPPFaceLandmarker`. * This is useful to test equality when there are multiple instances of `FaceLandmarker`.
* @param result The `MPPFaceLandmarkerResult` object that contains a list of landmarks. * @param result The `FaceLandmarkerResult` object that contains a list of landmarks.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the face detector. * image was sent to the face detector.
* @param error An optional error parameter populated when there is an error in performing face * @param error An optional error parameter populated when there is an error in performing face
@ -55,26 +55,25 @@ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate)
NS_SWIFT_NAME(faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)); NS_SWIFT_NAME(faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:));
@end @end
/** Options for setting up a `MPPFaceLandmarker`. */ /** Options for setting up a `FaceLandmarker`. */
NS_SWIFT_NAME(FaceLandmarkerOptions) NS_SWIFT_NAME(FaceLandmarkerOptions)
@interface MPPFaceLandmarkerOptions : MPPTaskOptions <NSCopying> @interface MPPFaceLandmarkerOptions : MPPTaskOptions <NSCopying>
/** /**
* Running mode of the face landmark detection task. Defaults to `MPPRunningModeImage`. * Running mode of the face landmark detection task. Defaults to `.image`. `FaceLandmarker` can be
* `MPPFaceLandmarker` can be created with one of the following running modes: * created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs. * 1. `.image`: The mode for performing face detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a * 2. `.video`: The mode for performing face detection on the decoded frames of a video.
* video. * 3. `.liveStream`: The mode for performing face detection on a live stream of input data, such as
* 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of * from the camera.
* input data, such as from the camera.
*/ */
@property(nonatomic) MPPRunningMode runningMode; @property(nonatomic) MPPRunningMode runningMode;
/** /**
* An object that conforms to `MPPFaceLandmarkerLiveStreamDelegate` protocol. This object must * An object that conforms to `FaceLandmarkerLiveStreamDelegate` protocol. This object must
* implement `faceLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive * implement `faceLandmarker(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to
* the results of performing asynchronous face landmark detection on images (i.e, when `runningMode` * receive the results of performing asynchronous face landmark detection on images (i.e, when
* = `MPPRunningModeLiveStream`). * `runningMode` = `.liveStream`).
*/ */
@property(nonatomic, weak, nullable) id<MPPFaceLandmarkerLiveStreamDelegate> @property(nonatomic, weak, nullable) id<MPPFaceLandmarkerLiveStreamDelegate>
faceLandmarkerLiveStreamDelegate; faceLandmarkerLiveStreamDelegate;

View File

@ -54,7 +54,7 @@ NS_SWIFT_NAME(TransformMatrix)
@end @end
/** Represents the detection results generated by `MPPFaceLandmarker`. */ /** Represents the detection results generated by `FaceLandmarker`. */
NS_SWIFT_NAME(FaceLandmarkerResult) NS_SWIFT_NAME(FaceLandmarkerResult)
@interface MPPFaceLandmarkerResult : MPPTaskResult @interface MPPFaceLandmarkerResult : MPPTaskResult
@ -72,16 +72,16 @@ NS_SWIFT_NAME(FaceLandmarkerResult)
@property(nonatomic, readonly) NSArray<MPPTransformMatrix *> *facialTransformationMatrixes; @property(nonatomic, readonly) NSArray<MPPTransformMatrix *> *facialTransformationMatrixes;
/** /**
* Initializes a new `MPPFaceLandmarkerResult` with the given array of landmarks, blendshapes, * Initializes a new `FaceLandmarkerResult` with the given array of landmarks, blendshapes,
* facialTransformationMatrixes and timestamp (in milliseconds). * facialTransformationMatrixes and timestamp (in milliseconds).
* *
* @param faceLandmarks An array of `MPPNormalizedLandmark` objects. * @param faceLandmarks An array of `NormalizedLandmark` objects.
* @param faceBlendshapes An array of `MPPClassifications` objects. * @param faceBlendshapes An array of `Classifications` objects.
* @param facialTransformationMatrixes An array of flattened matrices. * @param facialTransformationMatrixes An array of flattened matrices.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
* *
* @return An instance of `MPPFaceLandmarkerResult` initialized with the given array of detections * @return An instance of `FaceLandmarkerResult` initialized with the given array of detections and
* and timestamp (in milliseconds). * timestamp (in milliseconds).
*/ */
- (instancetype)initWithFaceLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)faceLandmarks - (instancetype)initWithFaceLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)faceLandmarks
faceBlendshapes:(NSArray<MPPClassifications *> *)faceBlendshapes faceBlendshapes:(NSArray<MPPClassifications *> *)faceBlendshapes

View File

@ -53,28 +53,24 @@ NS_SWIFT_NAME(ImageClassifier)
@interface MPPImageClassifier : NSObject @interface MPPImageClassifier : NSObject
/** /**
* Creates a new instance of `MPPImageClassifier` from an absolute path to a TensorFlow Lite model * Creates a new instance of `ImageClassifier` from an absolute path to a TensorFlow Lite model file
* file stored locally on the device and the default `MPPImageClassifierOptions`. * stored locally on the device and the default `ImageClassifierOptions`.
* *
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* image classifier.
* *
* @return A new instance of `MPPImageClassifier` with the given model path. `nil` if there is an * @return A new instance of `ImageClassifier` with the given model path. `nil` if there is an
* error in initializing the image classifier. * error in initializing the image classifier.
*/ */
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/** /**
* Creates a new instance of `MPPImageClassifier` from the given `MPPImageClassifierOptions`. * Creates a new instance of `ImageClassifier` from the given `ImageClassifierOptions`.
* *
* @param options The options of type `MPPImageClassifierOptions` to use for configuring the * @param options The options of type `ImageClassifierOptions` to use for configuring the
* `MPPImageClassifier`. * `ImageClassifier`.
* @param error An optional error parameter populated when there is an error in initializing the
* image classifier.
* *
* @return A new instance of `MPPImageClassifier` with the given options. `nil` if there is an error * @return A new instance of `ImageClassifier` with the given options. `nil` if there is an error in
* in initializing the image classifier. * initializing the image classifier.
*/ */
- (nullable instancetype)initWithOptions:(MPPImageClassifierOptions *)options - (nullable instancetype)initWithOptions:(MPPImageClassifierOptions *)options
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
@ -82,49 +78,46 @@ NS_SWIFT_NAME(ImageClassifier)
/** /**
* Performs image classification on the provided MPPImage using the whole image as region of * Performs image classification on the provided MPPImage using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * `MPImage`. Only use this method when the `ImageClassifier` is created with running mode,
* `MPPRunningModeImage`. * `.image`.
* This method supports classification of RGBA images. If your `MPPImage` has a source type of *
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * This method supports classification of RGBA images. If your `MPImage` has a source type
* must have one of the following pixel format types: * of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image The `MPPImage` on which image classification is to be performed. * @param image The `MPPImage` on which image classification is to be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input image.
* *
* @return An `MPPImageClassifierResult` object that contains a list of image classifications. * @return An `ImageClassifierResult` object that contains a list of image classifications.
*/ */
- (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image
error:(NSError **)error error:(NSError **)error
NS_SWIFT_NAME(classify(image:)); NS_SWIFT_NAME(classify(image:));
/** /**
* Performs image classification on the provided `MPPImage` cropped to the specified region of * Performs image classification on the provided `MPImage` cropped to the specified region of
* interest. Rotation will be applied on the cropped image according to the `orientation` property * interest. Rotation will be applied on the cropped image according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * of the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeImage`. * running mode, `.image`.
* *
* This method supports classification of RGBA images. If your `MPPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image The `MPPImage` on which image classification is to be performed. * @param image The `MPImage` on which image classification is to be performed.
* @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which * @param roi A `CGRect` specifying the region of interest within the given `MPImage`, on which
* image classification should be performed. * image classification should be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input image.
* *
* @return An `MPPImageClassifierResult` object that contains a list of image classifications. * @return An `ImageClassifierResult` object that contains a list of image classifications.
*/ */
- (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image
regionOfInterest:(CGRect)roi regionOfInterest:(CGRect)roi
@ -132,30 +125,28 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classify(image:regionOfInterest:)); NS_SWIFT_NAME(classify(image:regionOfInterest:));
/** /**
* Performs image classification on the provided video frame of type `MPPImage` using the whole * Performs image classification on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeVideo`. * running mode `.video`.
* *
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
* be monotonically increasing. * be monotonically increasing.
* *
* This method supports classification of RGBA images. If your `MPPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image The `MPPImage` on which image classification is to be performed. * @param image The `MPPImage` on which image classification is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing. * timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input video frame.
* *
* @return An `MPPImageClassifierResult` object that contains a list of image classifications. * @return An `ImageClassifierResult` object that contains a list of image classifications.
*/ */
- (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@ -163,33 +154,30 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:)); NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:));
/** /**
* Performs image classification on the provided video frame of type `MPPImage` cropped to the * Performs image classification on the provided video frame of type `MPImage` cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property of * specified region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with `.video`.
* `MPPRunningModeVideo`.
* *
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
* be monotonically increasing. * be monotonically increasing.
* *
* This method supports classification of RGBA images. If your `MPPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image A live stream image data of type `MPPImage` on which image classification is to be * @param image A live stream image data of type `MPImage` on which image classification is to be
* performed. * performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing. * timestamps must be monotonically increasing.
* @param roi A `CGRect` specifying the region of interest within the video frame of type * @param roi A `CGRect` specifying the region of interest within the video frame of type
* `MPPImage`, on which image classification should be performed. * `MPImage`, on which image classification should be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input video frame.
* *
* @return An `MPPImageClassifierResult` object that contains a list of image classifications. * @return An `ImageClassifierResult` object that contains a list of image classifications.
*/ */
- (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@ -198,40 +186,38 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:)); NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:));
/** /**
* Sends live stream image data of type `MPPImage` to perform image classification using the whole * Sends live stream image data of type `MPImage` to perform image classification using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* `MPPRunningModeLiveStream`. * `MPPRunningModeLiveStream`.
* *
* The object which needs to be continuously notified of the available results of image * The object which needs to be continuously notified of the available results of image
* classification must conform to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * classification must conform to `ImageClassifierLiveStreamDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` * `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)`
* delegate method. * delegate method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing. * to the image classifier. The input timestamps must be monotonically increasing.
* *
* This method supports classification of RGBA images. If your `MPPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* space is RGB with an Alpha channel. * Alpha channel.
* *
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property. * `videoSettings` property.
* *
* @param image A live stream image data of type `MPPImage` on which image classification is to be * @param image A live stream image data of type `MPImage` on which image classification is to be
* performed. * performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the image classifier. The input timestamps must be monotonically increasing. * image is sent to the image classifier. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input live stream image data.
* *
* @return `YES` if the image was sent to the task successfully, otherwise `NO`. * @return `true` if the image was sent to the task successfully, otherwise `false`.
*/ */
- (BOOL)classifyAsyncImage:(MPPImage *)image - (BOOL)classifyAsyncImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@ -239,42 +225,40 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:)); NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
/** /**
* Sends live stream image data of type ``MPPImage`` to perform image classification, cropped to the * Sends live stream image data of type `MPImage` to perform image classification, cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property * specified region of interest. Rotation will be applied according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with * of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* `MPPRunningModeLiveStream`. * `.liveStream`.
* *
* The object which needs to be continuously notified of the available results of image * The object which needs to be continuously notified of the available results of image
* classification must conform to `MPPImageClassifierLiveStreamDelegate` protocol and implement the * classification must conform to `ImageClassifierLiveStreamDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate * `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` delegate
* method. * method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing. * to the image classifier. The input timestamps must be monotonically increasing.
* *
* This method supports classification of RGBA images. If your `MPPImage` has a source type of * This method supports classification of RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* space is RGB with an Alpha channel. * Alpha channel.
* *
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property. * `videoSettings` property.
* *
* @param image A live stream image data of type `MPPImage` on which image classification is to be * @param image A live stream image data of type `MPImage` on which image classification is to be
* performed. * performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the image classifier. The input timestamps must be monotonically increasing. * image is sent to the image classifier. The input timestamps must be monotonically increasing.
* @param roi A `CGRect` specifying the region of interest within the given live stream image data * @param roi A `CGRect` specifying the region of interest within the given live stream image data
* of type `MPPImage`, on which image classification should be performed. * of type `MPImage`, on which image classification should be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input live stream image data.
* *
* @return `YES` if the image was sent to the task successfully, otherwise `NO`. * @return `true` if the image was sent to the task successfully, otherwise `false`.
*/ */
- (BOOL)classifyAsyncImage:(MPPImage *)image - (BOOL)classifyAsyncImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds

View File

@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPImageClassifier; @class MPPImageClassifier;
/** /**
* This protocol defines an interface for the delegates of `MPPImageClassifier` object to receive * This protocol defines an interface for the delegates of `ImageClassifier` object to receive
* results of asynchronous classification of images (i.e, when `runningMode = * results of asynchronous classification of images (i.e, when `runningMode =
* MPPRunningModeLiveStream`). * .liveStream`).
* *
* The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierLiveStreamDelegate` protocol. * The delegate of `ImageClassifier` must adopt `ImageClassifierLiveStreamDelegate` protocol.
* The methods in this protocol are optional. * The methods in this protocol are optional.
*/ */
NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
@ -36,14 +36,14 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
@optional @optional
/** /**
* This method notifies a delegate that the results of asynchronous classification of * This method notifies a delegate that the results of asynchronous classification of
* an image submitted to the `MPPImageClassifier` is available. * an image submitted to the `ImageClassifier` is available.
* *
* This method is called on a private serial queue created by the `MPPImageClassifier` * This method is called on a private serial queue created by the `ImageClassifier`
* for performing the asynchronous delegates calls. * for performing the asynchronous delegates calls.
* *
* @param imageClassifier The image classifier which performed the classification. * @param imageClassifier The image classifier which performed the classification.
* This is useful to test equality when there are multiple instances of `MPPImageClassifier`. * This is useful to test equality when there are multiple instances of `ImageClassifier`.
* @param result An `MPPImageClassifierResult` object that contains a list of image classifications. * @param result An `ImageClassifierResult` object that contains a list of image classifications.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the image classifier. * image was sent to the image classifier.
* @param error An optional error parameter populated when there is an error in performing image * @param error An optional error parameter populated when there is an error in performing image
@ -57,27 +57,27 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
@end @end
/** /**
* Options for setting up a `MPPImageClassifier`. * Options for setting up a `ImageClassifier`.
*/ */
NS_SWIFT_NAME(ImageClassifierOptions) NS_SWIFT_NAME(ImageClassifierOptions)
@interface MPPImageClassifierOptions : MPPTaskOptions <NSCopying> @interface MPPImageClassifierOptions : MPPTaskOptions <NSCopying>
/** /**
* Running mode of the image classifier task. Defaults to `MPPRunningModeImage`. * Running mode of the image classifier task. Defaults to `.image`.
* `MPPImageClassifier` can be created with one of the following running modes: * `ImageClassifier` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing classification on single image inputs. * 1. `.image`: The mode for performing classification on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing classification on the decoded frames of a * 2. `.video`: The mode for performing classification on the decoded frames of a
* video. * video.
* 3. `MPPRunningModeLiveStream`: The mode for performing classification on a live stream of input * 3. `.liveStream`: The mode for performing classification on a live stream of input
* data, such as from the camera. * data, such as from the camera.
*/ */
@property(nonatomic) MPPRunningMode runningMode; @property(nonatomic) MPPRunningMode runningMode;
/** /**
* An object that conforms to `MPPImageClassifierLiveStreamDelegate` protocol. This object must * An object that conforms to `ImageClassifierLiveStreamDelegate` protocol. This object must
* implement `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` to receive * implement `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` to
* the results of asynchronous classification on images (i.e, when `runningMode = * receive the results of asynchronous classification on images (i.e, when `runningMode =
* MPPRunningModeLiveStream`). * .liveStream`).
*/ */
@property(nonatomic, weak, nullable) id<MPPImageClassifierLiveStreamDelegate> @property(nonatomic, weak, nullable) id<MPPImageClassifierLiveStreamDelegate>
imageClassifierLiveStreamDelegate; imageClassifierLiveStreamDelegate;

View File

@ -18,23 +18,23 @@
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
/** Represents the classification results generated by `MPPImageClassifier`. **/ /** Represents the classification results generated by `ImageClassifier`. **/
NS_SWIFT_NAME(ImageClassifierResult) NS_SWIFT_NAME(ImageClassifierResult)
@interface MPPImageClassifierResult : MPPTaskResult @interface MPPImageClassifierResult : MPPTaskResult
/** The `MPPClassificationResult` instance containing one set of results per classifier head. **/ /** The `ClassificationResult` instance containing one set of results per classifier head. **/
@property(nonatomic, readonly) MPPClassificationResult *classificationResult; @property(nonatomic, readonly) MPPClassificationResult *classificationResult;
/** /**
* Initializes a new `MPPImageClassifierResult` with the given `MPPClassificationResult` and * Initializes a new `ImageClassifierResult` with the given `ClassificationResult` and
* timestamp (in milliseconds). * timestamp (in milliseconds).
* *
* @param classificationResult The `MPPClassificationResult` instance containing one set of results * @param classificationResult The `ClassificationResult` instance containing one set of results
* per classifier head. * per classifier head.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
* *
* @return An instance of `MPPImageClassifierResult` initialized with the given * @return An instance of `ImageClassifierResult` initialized with the given
* `MPPClassificationResult` and timestamp (in milliseconds). * `ClassificationResult` and timestamp (in milliseconds).
*/ */
- (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds; timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

View File

@ -64,52 +64,50 @@ NS_SWIFT_NAME(ObjectDetector)
@interface MPPObjectDetector : NSObject @interface MPPObjectDetector : NSObject
/** /**
* Creates a new instance of `MPPObjectDetector` from an absolute path to a TensorFlow Lite model * Creates a new instance of `ObjectDetector` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPObjectDetector`. * file stored locally on the device and the default `ObjectDetector`.
* *
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the * @param error An optional error parameter populated when there is an error in initializing the
* object detector. * object detector.
* *
* @return A new instance of `MPPObjectDetector` with the given model path. `nil` if there is an * @return A new instance of `ObjectDetector` with the given model path. `nil` if there is an
* error in initializing the object detector. * error in initializing the object detector.
*/ */
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/** /**
* Creates a new instance of `MPPObjectDetector` from the given `MPPObjectDetectorOptions`. * Creates a new instance of `ObjectDetector` from the given `ObjectDetectorOptions`.
* *
* @param options The options of type `MPPObjectDetectorOptions` to use for configuring the * @param options The options of type `ObjectDetectorOptions` to use for configuring the
* `MPPObjectDetector`. * `ObjectDetector`.
* @param error An optional error parameter populated when there is an error in initializing the * @param error An optional error parameter populated when there is an error in initializing the
* object detector. * object detector.
* *
* @return A new instance of `MPPObjectDetector` with the given options. `nil` if there is an error * @return A new instance of `ObjectDetector` with the given options. `nil` if there is an error
* in initializing the object detector. * in initializing the object detector.
*/ */
- (nullable instancetype)initWithOptions:(MPPObjectDetectorOptions *)options - (nullable instancetype)initWithOptions:(MPPObjectDetectorOptions *)options
error:(NSError **)error NS_DESIGNATED_INITIALIZER; error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/** /**
* Performs object detection on the provided MPPImage using the whole image as region of * Performs object detection on the provided MPImage using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided * interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * `MPImage`. Only use this method when the `ObjectDetector` is created with
* `MPPRunningModeImage`. * `.image`.
* *
* This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is
* RGB with an Alpha channel. * RGB with an Alpha channel.
* *
* @param image The `MPPImage` on which object detection is to be performed. * @param image The `MPImage` on which object detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
* *
* @return An `MPPObjectDetectorResult` object that contains a list of detections, each detection * @return An `ObjectDetectorResult` object that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates * has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data. * image data.
@ -118,27 +116,25 @@ NS_SWIFT_NAME(ObjectDetector)
error:(NSError **)error NS_SWIFT_NAME(detect(image:)); error:(NSError **)error NS_SWIFT_NAME(detect(image:));
/** /**
* Performs object detection on the provided video frame of type `MPPImage` using the whole * Performs object detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with
* `MPPRunningModeVideo`. * `.video`.
* *
* This method supports detecting objects in of RGBA images. If your `MPPImage` has a source type of * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* RGB with an Alpha channel. * channel.
* *
* @param image The `MPPImage` on which object detection is to be performed. * @param image The `MPImage` on which object detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing. * timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
* *
* @return An `MPPObjectDetectorResult` object that contains a list of detections, each detection * @return An `ObjectDetectorResult` object that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates * has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data. * image data.
@ -149,26 +145,26 @@ NS_SWIFT_NAME(ObjectDetector)
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
/** /**
* Sends live stream image data of type `MPPImage` to perform object detection using the whole * Sends live stream image data of type `MPImage` to perform object detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of * image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with
* `MPPRunningModeLiveStream`. * `.liveStream`.
* *
* The object which needs to be continuously notified of the available results of object * The object which needs to be continuously notified of the available results of object
* detection must confirm to `MPPObjectDetectorLiveStreamDelegate` protocol and implement the * detection must conform to `ObjectDetectorLiveStreamDelegate` protocol and implement the
* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. * `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method.
* *
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing. * to the object detector. The input timestamps must be monotonically increasing.
* *
* This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* must have one of the following pixel format types: * pixel format types:
* 1. kCVPixelFormatType_32BGRA * 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA * 2. kCVPixelFormatType_32RGBA
* *
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* space is RGB with an Alpha channel. * Alpha channel.
* *
* If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure * If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure
* that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
@ -178,10 +174,8 @@ NS_SWIFT_NAME(ObjectDetector)
* performed. * performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the object detector. The input timestamps must be monotonically increasing. * image is sent to the object detector. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input live stream image data.
* *
* @return `YES` if the image was sent to the task successfully, otherwise `NO`. * @return `true` if the image was sent to the task successfully, otherwise `false`.
*/ */
- (BOOL)detectAsyncInImage:(MPPImage *)image - (BOOL)detectAsyncInImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds timestampInMilliseconds:(NSInteger)timestampInMilliseconds

View File

@ -81,8 +81,7 @@ static NSString *const kTaskName = @"objectDetector";
} }
MPPObjectDetectorResult *result = [MPPObjectDetectorResult MPPObjectDetectorResult *result = [MPPObjectDetectorResult
objectDetectorResultWithDetectionsPacket: objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]];
outputPacketMap[kDetectionsStreamName.cppString]];
NSInteger timeStampInMilliseconds = NSInteger timeStampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /

View File

@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPObjectDetector; @class MPPObjectDetector;
/** /**
* This protocol defines an interface for the delegates of `MPPObjectDetector` object to receive * This protocol defines an interface for the delegates of `ObjectDetector` object to receive
* results of performing asynchronous object detection on images (i.e, when `runningMode` = * results of performing asynchronous object detection on images (i.e, when `runningMode` =
* `MPPRunningModeLiveStream`). * `.liveStream`).
* *
* The delegate of `MPPObjectDetector` must adopt `MPPObjectDetectorLiveStreamDelegate` protocol. * The delegate of `ObjectDetector` must adopt `ObjectDetectorLiveStreamDelegate` protocol.
* The methods in this protocol are optional. * The methods in this protocol are optional.
*/ */
NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate) NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
@ -37,14 +37,14 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
/** /**
* This method notifies a delegate that the results of asynchronous object detection of * This method notifies a delegate that the results of asynchronous object detection of
* an image submitted to the `MPPObjectDetector` is available. * an image submitted to the `ObjectDetector` is available.
* *
* This method is called on a private serial dispatch queue created by the `MPPObjectDetector` * This method is called on a private serial dispatch queue created by the `ObjectDetector`
* for performing the asynchronous delegates calls. * for performing the asynchronous delegates calls.
* *
* @param objectDetector The object detector which performed the object detection. * @param objectDetector The object detector which performed the object detection.
* This is useful to test equality when there are multiple instances of `MPPObjectDetector`. * This is useful to test equality when there are multiple instances of `ObjectDetector`.
* @param result The `MPPObjectDetectorResult` object that contains a list of detections, each * @param result The `ObjectDetectorResult` object that contains a list of detections, each
* detection has a bounding box that is expressed in the unrotated input frame of reference * detection has a bounding box that is expressed in the unrotated input frame of reference
* coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
* underlying image data. * underlying image data.
@ -60,26 +60,27 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:)); NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:));
@end @end
/** Options for setting up a `MPPObjectDetector`. */ /** Options for setting up an `ObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorOptions) NS_SWIFT_NAME(ObjectDetectorOptions)
@interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying> @interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying>
/** /**
* Running mode of the object detector task. Defaults to `MPPRunningModeImage`. * Running mode of the object detector task. Defaults to `.image`.
* `MPPObjectDetector` can be created with one of the following running modes: * `ObjectDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing object detection on single image inputs. * 1. `.image`: The mode for performing object detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing object detection on the decoded frames of a * 2. `.video`: The mode for performing object detection on the decoded frames of a
* video. * video.
* 3. `MPPRunningModeLiveStream`: The mode for performing object detection on a live stream of * 3. `.liveStream`: The mode for performing object detection on a live stream of
* input data, such as from the camera. * input data, such as from the camera.
*/ */
@property(nonatomic) MPPRunningMode runningMode; @property(nonatomic) MPPRunningMode runningMode;
/** /**
* An object that confirms to `MPPObjectDetectorLiveStreamDelegate` protocol. This object must * An object that conforms to `ObjectDetectorLiveStreamDelegate` protocol. This object must
* implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive * implement `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to
* the results of performing asynchronous object detection on images (i.e, when `runningMode` = * receive the results of performing asynchronous object detection on images (i.e, when
* `MPPRunningModeLiveStream`). * `runningMode` = `.liveStream`).
*/ */
@property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate> @property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
objectDetectorLiveStreamDelegate; objectDetectorLiveStreamDelegate;

View File

@ -18,27 +18,27 @@
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
/** Represents the detection results generated by `MPPObjectDetector`. */ /** Represents the detection results generated by `ObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorResult) NS_SWIFT_NAME(ObjectDetectorResult)
@interface MPPObjectDetectorResult : MPPTaskResult @interface MPPObjectDetectorResult : MPPTaskResult
/** /**
* The array of `MPPDetection` objects each of which has a bounding box that is expressed in the * The array of `Detection` objects each of which has a bounding box that is expressed in the
* unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x * unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x
* [0,image_height)`, which are the dimensions of the underlying image data. * [0,image_height)`, which are the dimensions of the underlying image data.
*/ */
@property(nonatomic, readonly) NSArray<MPPDetection *> *detections; @property(nonatomic, readonly) NSArray<MPPDetection *> *detections;
/** /**
* Initializes a new `MPPObjectDetectorResult` with the given array of detections and timestamp (in * Initializes a new `ObjectDetectorResult` with the given array of detections and timestamp (in
* milliseconds). * milliseconds).
* *
* @param detections An array of `MPPDetection` objects each of which has a bounding box that is * @param detections An array of `Detection` objects each of which has a bounding box that is
* expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width) * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
* x [0,image_height)`, which are the dimensions of the underlying image data. * x [0,image_height)`, which are the dimensions of the underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
* *
* @return An instance of `MPPObjectDetectorResult` initialized with the given array of detections * @return An instance of `ObjectDetectorResult` initialized with the given array of detections
* and timestamp (in milliseconds). * and timestamp (in milliseconds).
*/ */
- (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections