Updated iOS docs to use Swift names in place of Objective-C names

Prianka Liz Kariat 2023-08-25 18:06:50 +05:30
parent 6e6978cdbf
commit 3f0ec5969b
18 changed files with 342 additions and 384 deletions

View File

@ -44,14 +44,14 @@ NS_SWIFT_NAME(ResultCategory)
@property(nonatomic, readonly, nullable) NSString *displayName;
/**
* Initializes a new `MPPCategory` with the given index, score, category name and display name.
* Initializes a new `ResultCategory` with the given index, score, category name and display name.
*
* @param index The index of the label in the corresponding label file.
* @param score The probability score of this label category.
* @param categoryName The label of this category object.
* @param displayName The display name of the label.
*
* @return An instance of `MPPCategory` initialized with the given index, score, category name and
* @return An instance of `ResultCategory` initialized with the given index, score, category name and
* display name.
*/
- (instancetype)initWithIndex:(NSInteger)index
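Example (Swift): a minimal sketch of the renamed initializer. The values and the `MediaPipeTasksVision` import are illustrative assumptions; note the Swift name is `ResultCategory` per the `NS_SWIFT_NAME(ResultCategory)` annotation above.

```swift
import MediaPipeTasksVision

// Hypothetical label values; `MPPCategory` surfaces in Swift as `ResultCategory`.
let category = ResultCategory(
  index: 0,            // index of the label in the label file
  score: 0.97,         // probability score
  categoryName: "cat", // label of this category
  displayName: "Cat")  // human-readable display name
```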

View File

@ -32,32 +32,32 @@ NS_SWIFT_NAME(Classifications)
/** The optional name of the classifier head, which is the corresponding tensor metadata name. */
@property(nonatomic, readonly, nullable) NSString *headName;
/** An array of `MPPCategory` objects containing the predicted categories. */
/** An array of `ResultCategory` objects containing the predicted categories. */
@property(nonatomic, readonly) NSArray<MPPCategory *> *categories;
/**
* Initializes a new `MPPClassifications` object with the given head index and array of categories.
* Initializes a new `Classifications` object with the given head index and array of categories.
* Head name is initialized to `nil`.
*
* @param headIndex The index of the classifier head.
* @param categories An array of `MPPCategory` objects containing the predicted categories.
* @param categories An array of `ResultCategory` objects containing the predicted categories.
*
* @return An instance of `MPPClassifications` initialized with the given head index and
* @return An instance of `Classifications` initialized with the given head index and
* array of categories.
*/
- (instancetype)initWithHeadIndex:(NSInteger)headIndex
categories:(NSArray<MPPCategory *> *)categories;
/**
* Initializes a new `MPPClassifications` with the given head index, head name and array of
* Initializes a new `Classifications` with the given head index, head name and array of
* categories.
*
* @param headIndex The index of the classifier head.
* @param headName The name of the classifier head, which is the corresponding tensor metadata
* name.
* @param categories An array of `MPPCategory` objects containing the predicted categories.
* @param categories An array of `ResultCategory` objects containing the predicted categories.
*
* @return An object of `MPPClassifications` initialized with the given head index, head name and
* @return An object of `Classifications` initialized with the given head index, head name and
* array of categories.
*/
- (instancetype)initWithHeadIndex:(NSInteger)headIndex
@ -78,7 +78,7 @@ NS_SWIFT_NAME(ClassificationResult)
@interface MPPClassificationResult : NSObject
/**
* An Array of `MPPClassifications` objects containing the predicted categories for each head of
* An array of `Classifications` objects containing the predicted categories for each head of
* the model.
*/
@property(nonatomic, readonly) NSArray<MPPClassifications *> *classifications;
@ -93,15 +93,15 @@ NS_SWIFT_NAME(ClassificationResult)
@property(nonatomic, readonly) NSInteger timestampInMilliseconds;
/**
* Initializes a new `MPPClassificationResult` with the given array of classifications and time
* Initializes a new `ClassificationResult` with the given array of classifications and time
* stamp (in milliseconds).
*
* @param classifications An Array of `MPPClassifications` objects containing the predicted
* @param classifications An array of `Classifications` objects containing the predicted
* categories for each head of the model.
* @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data
* corresponding to these results.
*
* @return An instance of `MPPClassificationResult` initialized with the given array of
* @return An instance of `ClassificationResult` initialized with the given array of
* classifications and timestamp (in milliseconds).
*/
- (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
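Example (Swift): assembling the two containers with the documented initializers (values are illustrative):

```swift
import MediaPipeTasksVision

// One classifier head with its predicted categories.
let head = Classifications(
  headIndex: 0,
  categories: [ResultCategory(index: 1, score: 0.82, categoryName: "dog", displayName: nil)])

// The per-timestamp result wrapping all heads of the model.
let result = ClassificationResult(classifications: [head], timestampInMilliseconds: 0)
```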

View File

@ -35,7 +35,7 @@ NS_SWIFT_NAME(NormalizedKeypoint)
@property(nonatomic, readonly) float score;
/**
* Initializes a new `MPPNormalizedKeypoint` object with the given location, label and score.
* Initializes a new `NormalizedKeypoint` object with the given location, label and score.
* You must pass 0.0 for `score` if it is not present.
*
* @param location The (x,y) coordinates location of the normalized keypoint.
@ -43,7 +43,7 @@ NS_SWIFT_NAME(NormalizedKeypoint)
* @param score The optional score of the normalized keypoint. You must pass 0.0 for score if it
* is not present.
*
* @return An instance of `MPPNormalizedKeypoint` initialized with the given given location, label
* @return An instance of `NormalizedKeypoint` initialized with the given location, label
* and score.
*/
- (instancetype)initWithLocation:(CGPoint)location
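Example (Swift): a sketch using the documented parameters; the label string is hypothetical, and `score: 0.0` encodes "not present" as described above.

```swift
import CoreGraphics
import MediaPipeTasksVision

let keypoint = NormalizedKeypoint(
  location: CGPoint(x: 0.5, y: 0.5), // normalized (x, y) location
  label: "nose_tip",                 // hypothetical label
  score: 0.0)                        // 0.0 means the score is not present
```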
@ -56,18 +56,18 @@ NS_SWIFT_NAME(NormalizedKeypoint)
@end
/** Represents one detected object in the results of `MPPObjectDetector`. */
/** Represents one detected object in the results of `ObjectDetector`. */
NS_SWIFT_NAME(Detection)
@interface MPPDetection : NSObject
/** An array of `MPPCategory` objects containing the predicted categories. */
/** An array of `ResultCategory` objects containing the predicted categories. */
@property(nonatomic, readonly) NSArray<MPPCategory *> *categories;
/** The bounding box of the detected object. */
@property(nonatomic, readonly) CGRect boundingBox;
/**
* An optional array of `MPPNormalizedKeypoint` objects associated with the detection. Keypoints
* An optional array of `NormalizedKeypoint` objects associated with the detection. Keypoints
* represent interesting points related to the detection. For example, the keypoints represent the
* eyes, ear and mouth from the face detection model. In template matching detection, e.g. KNIFT,
* they can instead represent the feature points for template matching.
@ -75,18 +75,18 @@ NS_SWIFT_NAME(Detection)
@property(nonatomic, readonly, nullable) NSArray<MPPNormalizedKeypoint *> *keypoints;
/**
* Initializes a new `MPPDetection` object with the given array of categories, bounding box and
* Initializes a new `Detection` object with the given array of categories, bounding box and
* optional array of keypoints.
*
* @param categories A list of `MPPCategory` objects that contain category name, display name,
* @param categories A list of `ResultCategory` objects that contain category name, display name,
* score, and the label index.
* @param boundingBox A `CGRect` that represents the bounding box.
* @param keypoints: An optional array of `MPPNormalizedKeypoint` objects associated with the
* @param keypoints An optional array of `NormalizedKeypoint` objects associated with the
* detection. Keypoints represent interesting points related to the detection. For example, the
* keypoints represent the eyes, ear and mouth from the face detection model. In template matching
* detection, e.g. KNIFT, they can instead represent the feature points for template matching.
*
* @return An instance of `MPPDetection` initialized with the given array of categories, bounding
* @return An instance of `Detection` initialized with the given array of categories, bounding
* box and `nil` keypoints.
*/
- (instancetype)initWithCategories:(NSArray<MPPCategory *> *)categories
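Example (Swift): constructing a `Detection` by hand (values are illustrative; `keypoints: nil` exercises the optional parameter):

```swift
import MediaPipeTasksVision

let detection = Detection(
  categories: [ResultCategory(index: 0, score: 0.9, categoryName: "face", displayName: nil)],
  boundingBox: CGRect(x: 24, y: 36, width: 120, height: 160),
  keypoints: nil) // no keypoints in this sketch
```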

View File

@ -49,13 +49,13 @@ NS_SWIFT_NAME(Landmark)
@property(nonatomic, readonly, nullable) NSNumber *presence;
/**
* Initializes a new `MPPLandmark` object with the given x, y and z coordinates.
* Initializes a new `Landmark` object with the given x, y and z coordinates.
*
* @param x The x coordinates of the landmark.
* @param y The y coordinates of the landmark.
* @param z The z coordinates of the landmark.
*
* @return An instance of `MPPLandmark` initialized with the given x, y and z coordinates.
* @return An instance of `Landmark` initialized with the given x, y and z coordinates.
*/
- (instancetype)initWithX:(float)x
y:(float)y
@ -103,13 +103,13 @@ NS_SWIFT_NAME(NormalizedLandmark)
@property(nonatomic, readonly, nullable) NSNumber *presence;
/**
* Initializes a new `MPPNormalizedLandmark` object with the given x, y and z coordinates.
* Initializes a new `NormalizedLandmark` object with the given x, y and z coordinates.
*
* @param x The x coordinates of the landmark.
* @param y The y coordinates of the landmark.
* @param z The z coordinates of the landmark.
*
* @return An instance of `MPPNormalizedLandmark` initialized with the given x, y and z coordinates.
* @return An instance of `NormalizedLandmark` initialized with the given x, y and z coordinates.
*/
- (instancetype)initWithX:(float)x
y:(float)y
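Example (Swift): a sketch assuming the initializer takes exactly the three documented coordinates:

```swift
import MediaPipeTasksVision

// Normalized coordinates are image-relative, typically in [0.0, 1.0].
let landmark = NormalizedLandmark(x: 0.25, y: 0.5, z: 0.0)
```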

View File

@ -40,10 +40,10 @@ NS_SWIFT_NAME(MPImage)
@property(nonatomic, readonly) CGFloat height;
/**
* The display orientation of the image. If `imageSourceType` is `MPPImageSourceTypeImage`, the
* The display orientation of the image. If `imageSourceType` is `.image`, the
* default value is `image.imageOrientation`; otherwise the default value is
* `UIImageOrientationUp`. If the `MPPImage` is being used as input for any MediaPipe vision tasks
* and is set to any orientation other than `UIImageOrientationUp`, inference will be performed on
* `UIImage.Orientation.up`. If the `MPImage` is being used as input for any MediaPipe vision tasks
* and is set to any orientation other than `UIImage.Orientation.up`, inference will be performed on
* a rotated copy of the image according to the orientation.
*/
@property(nonatomic, readonly) UIImageOrientation orientation;
@ -54,46 +54,48 @@ NS_SWIFT_NAME(MPImage)
/** The source image. `nil` if `imageSourceType` is not `.image`. */
@property(nonatomic, readonly, nullable) UIImage *image;
/** The source pixel buffer. `nil` if `imageSourceType` is not `.pixelBuffer`. */
/** The source pixel buffer. `nil` if `imageSourceType` is not `.pixelBuffer`. */
@property(nonatomic, readonly, nullable) CVPixelBufferRef pixelBuffer;
/** The source sample buffer. `nil` if `imageSourceType` is not `.sampleBuffer`. */
/** The source sample buffer. `nil` if `imageSourceType` is not `.sampleBuffer`. */
@property(nonatomic, readonly, nullable) CMSampleBufferRef sampleBuffer;
/**
* Initializes an `MPPImage` object with the given `UIImage`.
* The orientation of the newly created `MPPImage` will be equal to the `imageOrientation` of
* Initializes an `MPImage` object with the given `UIImage`.
*
* The orientation of the newly created `MPImage` will be equal to the `imageOrientation` of
* `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly.
* To create an `MPPImage` with an orientation different from its `imageOrientation`, please use
* `[MPPImage initWithImage:orientation:error:]`.
* To create an `MPImage` with an orientation different from its `imageOrientation`, please use
* `MPImage(uiImage:orientation:)`.
*
* @param image The image to use as the source. Its `CGImage` property must not be `NULL`.
* @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`.
* `MPImage`.
*
* @return A new `MPPImage` instance with the given image as the source. `nil` if the given
* @return A new `MPImage` instance with the given image as the source. `nil` if the given
* `image` is `nil` or invalid.
*/
- (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error;
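Example (Swift): the failable Objective-C initializer with an `NSError` out-parameter surfaces as a throwing initializer; the asset name is a placeholder.

```swift
import UIKit
import MediaPipeTasksVision

let uiImage = UIImage(named: "portrait")!   // hypothetical asset; its CGImage must be non-NULL
let mpImage = try MPImage(uiImage: uiImage) // orientation inherited from uiImage.imageOrientation
```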
/**
* Initializes an `MPPImage` object with the given `UIImage` and orientation. The given orientation
* will be used to calculate the rotation to be applied to the `UIImage` before inference is
* Initializes an `MPImage` object with the given `UIImage` and orientation.
*
* The given orientation will be used to calculate the rotation to be applied to the `UIImage` before inference is
* performed on it by the vision tasks. The `imageOrientation` stored in the `UIImage` is ignored
* when `MPImage` objects created by this method are sent to the vision tasks for inference. Use
* `[MPPImage initWithImage:orientation:error:]` to initialize images with the `imageOrientation` of
* `MPImage(uiImage:)` to initialize images with the `imageOrientation` of
* `UIImage`.
*
* If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference
* If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference
* will be performed on a copy of the image rotated according to the orientation.
*
* @param image The image to use as the source. Its `CGImage` property must not be `NULL`.
* @param orientation The display orientation of the image. This will be stored in the property
* `orientation` `MPPImage` and will override the `imageOrientation` of the passed in `UIImage`.
* `orientation` of `MPImage` and will override the `imageOrientation` of the passed in `UIImage`.
* @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`.
* `MPImage`.
*
* @return A new `MPPImage` instance with the given image as the source. `nil` if the given
* @return A new `MPImage` instance with the given image as the source. `nil` if the given
* `image` is `nil` or invalid.
*/
- (nullable instancetype)initWithUIImage:(UIImage *)image
@ -101,36 +103,36 @@ NS_SWIFT_NAME(MPImage)
error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/**
* Initializes an `MPPImage` object with the given pixel buffer.
* Initializes an `MPImage` object with the given pixel buffer.
*
* The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`.
* The orientation of the newly created `MPImage` will be `UIImage.Orientation.up`.
* Hence, if this image is used as input for any MediaPipe vision tasks, inference will be
* performed on the it without any rotation. To create an `MPPImage` with a different
* orientation, please use `[MPPImage initWithPixelBuffer:orientation:error:]`.
* performed on it without any rotation. To create an `MPImage` with a different
* orientation, please use `MPImage(pixelBuffer:orientation:)`.
*
* @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle.
* `MPImage` instance for the duration of its lifecycle.
* @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`.
* `MPImage`.
*
* @return A new `MPPImage` instance with the given pixel buffer as the source. `nil` if the
* @return A new `MPImage` instance with the given pixel buffer as the source. `nil` if the
* given pixel buffer is `nil` or invalid.
*/
- (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError **)error;
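Example (Swift): both pixel-buffer initializers referenced above; `pixelBuffer` stands in for a `CVPixelBuffer` obtained from a capture pipeline.

```swift
import MediaPipeTasksVision

let upright = try MPImage(pixelBuffer: pixelBuffer)                      // orientation defaults to up
let rotated = try MPImage(pixelBuffer: pixelBuffer, orientation: .right) // explicit display orientation
```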
/**
* Initializes an `MPPImage` object with the given pixel buffer and orientation.
* Initializes an `MPImage` object with the given pixel buffer and orientation.
*
* If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference
* If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference
* will be performed on a copy of the image rotated according to the orientation.
*
* @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle.
* `MPImage` instance for the duration of its lifecycle.
* @param orientation The display orientation of the image.
* @param error An optional error parameter populated when there is an error in initializing the
* `MPPImage`.
* `MPImage`.
*
* @return A new `MPPImage` instance with the given orientation and pixel buffer as the source.
* @return A new `MPImage` instance with the given orientation and pixel buffer as the source.
* `nil` if the given pixel buffer is `nil` or invalid.
*/
- (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
@ -138,35 +140,35 @@ NS_SWIFT_NAME(MPImage)
error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/**
* Initializes an `MPPImage` object with the given sample buffer.
* Initializes an `MPImage` object with the given sample buffer.
*
* The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`.
* The orientation of the newly created `MPImage` will be `UIImage.Orientation.up`.
* Hence, if this image is used as input for any MediaPipe vision tasks, inference will be
* performed on the it without any rotation. To create an `MPPImage` with a different orientation,
* please use `[MPPImage initWithSampleBuffer:orientation:error:]`.
* performed on it without any rotation. To create an `MPImage` with a different orientation,
* please use `MPImage(sampleBuffer:orientation:)`.
*
* @param sampleBuffer The sample buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle. The sample buffer must be based on
* `MPImage` instance for the duration of its lifecycle. The sample buffer must be based on
* a pixel buffer (not compressed data). In practice, it should be the video output of the
* camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s.
* @return A new `MPPImage` instance with the given sample buffer as the source. `nil` if the
* @return A new `MPImage` instance with the given sample buffer as the source. `nil` if the
* given sample buffer is `nil` or invalid.
*/
- (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
error:(NSError **)error;
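Example (Swift): the sample-buffer counterpart, assuming `sampleBuffer` is a pixel-buffer-backed `CMSampleBuffer` from `AVCaptureVideoDataOutput`.

```swift
import MediaPipeTasksVision

let frame = try MPImage(sampleBuffer: sampleBuffer)                        // orientation defaults to up
let oriented = try MPImage(sampleBuffer: sampleBuffer, orientation: .left) // explicit orientation
```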
/**
* Initializes an `MPPImage` object with the given sample buffer and orientation.
* Initializes an `MPImage` object with the given sample buffer and orientation.
*
* If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference
* If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference
* will be performed on a copy of the image rotated according to the orientation.
*
* @param sampleBuffer The sample buffer to use as the source. It will be retained by the new
* `MPPImage` instance for the duration of its lifecycle. The sample buffer must be based on
* `MPImage` instance for the duration of its lifecycle. The sample buffer must be based on
* a pixel buffer (not compressed data). In practice, it should be the video output of the
* camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s.
* @param orientation The display orientation of the image.
* @return A new `MPPImage` instance with the given orientation and sample buffer as the source.
* @return A new `MPImage` instance with the given orientation and sample buffer as the source.
* `nil` if the given sample buffer is `nil` or invalid.
*/
- (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer

View File

@ -57,27 +57,23 @@ NS_SWIFT_NAME(FaceDetector)
@interface MPPFaceDetector : NSObject
/**
* Creates a new instance of `MPPFaceDetector` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPFaceDetector`.
* Creates a new instance of `FaceDetector` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `FaceDetectorOptions`.
*
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* face detector.
*
* @return A new instance of `MPPFaceDetector` with the given model path. `nil` if there is an
* @return A new instance of `FaceDetector` with the given model path. `nil` if there is an
* error in initializing the face detector.
*/
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
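Example (Swift): creating a detector from a bundled model; the file name is a placeholder.

```swift
import MediaPipeTasksVision

// Hypothetical .tflite model shipped in the app bundle.
let modelPath = Bundle.main.path(forResource: "face_detector", ofType: "tflite")!
let faceDetector = try FaceDetector(modelPath: modelPath)
```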
/**
* Creates a new instance of `MPPFaceDetector` from the given `MPPFaceDetectorOptions`.
* Creates a new instance of `FaceDetector` from the given `FaceDetectorOptions`.
*
* @param options The options of type `MPPFaceDetectorOptions` to use for configuring the
* `MPPFaceDetector`.
* @param error An optional error parameter populated when there is an error in initializing the
* face detector.
* @param options The options of type `FaceDetectorOptions` to use for configuring the
* `FaceDetector`.
*
* @return A new instance of `MPPFaceDetector` with the given options. `nil` if there is an error
* @return A new instance of `FaceDetector` with the given options. `nil` if there is an error
* in initializing the face detector.
*/
- (nullable instancetype)initWithOptions:(MPPFaceDetectorOptions *)options
@ -86,23 +82,21 @@ NS_SWIFT_NAME(FaceDetector)
/**
* Performs face detection on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPFaceDetector` is created with
* `MPPRunningModeImage`.
* `MPImage`. Only use this method when the `FaceDetector` is created with running mode
* `.image`.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
* following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* If your `MPImage` has a source type of `.image` ensure that the color space is
* RGB with an Alpha channel.
*
* @param image The `MPPImage` on which face detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing face
* detection on the input image.
* @param image The `MPImage` on which face detection is to be performed.
*
* @return An `MPPFaceDetectorResult` face that contains a list of detections, each detection
* @return A `FaceDetectorResult` that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data.
@ -111,27 +105,25 @@ NS_SWIFT_NAME(FaceDetector)
error:(NSError **)error NS_SWIFT_NAME(detect(image:));
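Example (Swift): usage under the `detect(image:)` name shown in `NS_SWIFT_NAME`, assuming `mpImage` is an `MPImage` built as in the `MPImage` docs.

```swift
let result = try faceDetector.detect(image: mpImage)
for detection in result.detections {
  // Bounding boxes are in the unrotated input coordinate space.
  print(detection.boundingBox)
}
```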
/**
* Performs face detection on the provided video frame of type `MPPImage` using the whole
* Performs face detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPFaceDetector` is created with
* `MPPRunningModeVideo`.
* the provided `MPImage`. Only use this method when the `FaceDetector` is created with running
* mode `.video`.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
* following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image The `MPPImage` on which face detection is to be performed.
* @param image The `MPImage` on which face detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing face
* detection on the input image.
*
* @return An `MPPFaceDetectorResult` face that contains a list of detections, each detection
* @return A `FaceDetectorResult` that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data.
@ -142,39 +134,37 @@ NS_SWIFT_NAME(FaceDetector)
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
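Example (Swift): video mode; the timestamp must increase monotonically across calls.

```swift
let result = try faceDetector.detect(
  videoFrame: mpImage,
  timestampInMilliseconds: 33) // e.g. the frame presented at 33 ms
```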
/**
* Sends live stream image data of type `MPPImage` to perform face detection using the whole
* Sends live stream image data of type `MPImage` to perform face detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPFaceDetector` is created with
* `MPPRunningModeLiveStream`.
* the provided `MPImage`. Only use this method when the `FaceDetector` is created with
* `.liveStream`.
*
* The object which needs to be continuously notified of the available results of face
* detection must confirm to `MPPFaceDetectorLiveStreamDelegate` protocol and implement the
* `faceDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
* detection must conform to the `FaceDetectorLiveStreamDelegate` protocol and implement the
* `faceDetector(_:didFinishDetection:timestampInMilliseconds:error:)` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the face detector. The input timestamps must be monotonically increasing.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the
* following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
* If the input `MPImage` has a source type of `.image` ensure that the color
* space is RGB with an Alpha channel.
*
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property.
*
* @param image A live stream image data of type `MPPImage` on which face detection is to be
* @param image A live stream image data of type `MPImage` on which face detection is to be
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the face detector. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing face
* detection on the input live stream image data.
*
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
* @return `true` if the image was sent to the task successfully, otherwise `false`.
*/
- (BOOL)detectAsyncInImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
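Example (Swift): live-stream mode. The declaration above is truncated here; by analogy with the other methods, the Swift name is assumed to be `detectAsync(image:timestampInMilliseconds:)`. The call only enqueues the frame; results arrive via the delegate.

```swift
// Results are delivered to the FaceDetectorLiveStreamDelegate on a private serial queue.
try faceDetector.detectAsync(image: mpImage, timestampInMilliseconds: 66)
```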

View File

@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPFaceDetector;
/**
* This protocol defines an interface for the delegates of `MPPFaceDetector` face to receive
* This protocol defines an interface for the delegates of `FaceDetector` to receive
* results of performing asynchronous face detection on images (i.e., when `runningMode` =
* `MPPRunningModeLiveStream`).
* `.liveStream`).
*
* The delegate of `MPPFaceDetector` must adopt `MPPFaceDetectorLiveStreamDelegate` protocol.
* The delegate of `FaceDetector` must adopt `FaceDetectorLiveStreamDelegate` protocol.
* The methods in this protocol are optional.
*/
NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate)
@ -37,14 +37,14 @@ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate)
/**
* This method notifies a delegate that the results of asynchronous face detection of
* an image submitted to the `MPPFaceDetector` is available.
* an image submitted to the `FaceDetector` are available.
*
* This method is called on a private serial dispatch queue created by the `MPPFaceDetector`
* This method is called on a private serial dispatch queue created by the `FaceDetector`
* for performing the asynchronous delegate calls.
*
* @param faceDetector The face detector which performed the face detection.
* This is useful to test equality when there are multiple instances of `MPPFaceDetector`.
* @param result The `MPPFaceDetectorResult` object that contains a list of detections, each
* This is useful to test equality when there are multiple instances of `FaceDetector`.
* @param result The `FaceDetectorResult` object that contains a list of detections, each
* detection has a bounding box that is expressed in the unrotated input frame of reference
* coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
* underlying image data.
@ -60,26 +60,26 @@ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate)
NS_SWIFT_NAME(faceDetector(_:didFinishDetection:timestampInMilliseconds:error:));
@end
/** Options for setting up a `MPPFaceDetector`. */
/** Options for setting up a `FaceDetector`. */
NS_SWIFT_NAME(FaceDetectorOptions)
@interface MPPFaceDetectorOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the face detector task. Defaults to `MPPRunningModeImage`.
* `MPPFaceDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a
* Running mode of the face detector task. Defaults to `.image`.
* `FaceDetector` can be created with one of the following running modes:
* 1. `.image`: The mode for performing face detection on single image inputs.
* 2. `.video`: The mode for performing face detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of
* 3. `.liveStream`: The mode for performing face detection on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* An object that confirms to `MPPFaceDetectorLiveStreamDelegate` protocol. This object must
* implement `faceDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
* the results of performing asynchronous face detection on images (i.e, when `runningMode` =
* `MPPRunningModeLiveStream`).
* An object that conforms to the `FaceDetectorLiveStreamDelegate` protocol. This object must
* implement `faceDetector(_:didFinishDetection:timestampInMilliseconds:error:)` to
* receive the results of performing asynchronous face detection on images (i.e., when `runningMode`
* = `.liveStream`).
*/
@property(nonatomic, weak, nullable) id<MPPFaceDetectorLiveStreamDelegate>
faceDetectorLiveStreamDelegate;
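Example (Swift): wiring up the live-stream delegate. The method signature follows the `NS_SWIFT_NAME` shown earlier; the optionality of `result` and `error` is assumed.

```swift
import MediaPipeTasksVision

final class DetectionHandler: NSObject, FaceDetectorLiveStreamDelegate {
  func faceDetector(_ faceDetector: FaceDetector,
                    didFinishDetection result: FaceDetectorResult?,
                    timestampInMilliseconds: Int,
                    error: Error?) {
    // Called on the detector's private serial queue.
  }
}

let handler = DetectionHandler()
let options = FaceDetectorOptions()
options.runningMode = .liveStream
options.faceDetectorLiveStreamDelegate = handler // weak property: keep `handler` alive elsewhere
let detector = try FaceDetector(options: options)
```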

View File

@ -18,27 +18,27 @@
NS_ASSUME_NONNULL_BEGIN
/** Represents the detection results generated by `MPPFaceDetector`. */
/** Represents the detection results generated by `FaceDetector`. */
NS_SWIFT_NAME(FaceDetectorResult)
@interface MPPFaceDetectorResult : MPPTaskResult
/**
* The array of `MPPDetection` objects each of which has a bounding box that is expressed in the
* The array of `Detection` objects each of which has a bounding box that is expressed in the
* unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x
* [0,image_height)`, which are the dimensions of the underlying image data.
*/
@property(nonatomic, readonly) NSArray<MPPDetection *> *detections;
/**
* Initializes a new `MPPFaceDetectorResult` with the given array of detections and timestamp (in
* Initializes a new `FaceDetectorResult` with the given array of detections and timestamp (in
* milliseconds).
*
* @param detections An array of `MPPDetection` objects each of which has a bounding box that is
* @param detections An array of `Detection` objects each of which has a bounding box that is
* expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
* x [0,image_height)`, which are the dimensions of the underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
*
* @return An instance of `MPPFaceDetectorResult` initialized with the given array of detections
* @return An instance of `FaceDetectorResult` initialized with the given array of detections
* and timestamp (in milliseconds).
*/
- (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections

View File

@ -30,27 +30,23 @@ NS_SWIFT_NAME(FaceLandmarker)
@interface MPPFaceLandmarker : NSObject
/**
* Creates a new instance of `MPPFaceLandmarker` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPFaceLandmarker`.
* Creates a new instance of `FaceLandmarker` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `FaceLandmarkerOptions`.
*
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* face landmarker.
*
* @return A new instance of `MPPFaceLandmarker` with the given model path. `nil` if there is an
* @return A new instance of `FaceLandmarker` with the given model path. `nil` if there is an
* error in initializing the face landmarker.
*/
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/**
* Creates a new instance of `MPPFaceLandmarker` from the given `MPPFaceLandmarkerOptions`.
* Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`.
*
* @param options The options of type `MPPFaceLandmarkerOptions` to use for configuring the
* @param options The options of type `FaceLandmarkerOptions` to use for configuring the
* `FaceLandmarker`.
* @param error An optional error parameter populated when there is an error in initializing the
* face landmarker.
*
* @return A new instance of `MPPFaceLandmarker` with the given options. `nil` if there is an error
* @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error
* in initializing the face landmarker.
*/
- (nullable instancetype)initWithOptions:(MPPFaceLandmarkerOptions *)options
@ -59,49 +55,45 @@ NS_SWIFT_NAME(FaceLandmarker)
/**
* Performs face landmark detection on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created with
* `MPPRunningModeImage`.
* `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`.
*
* This method supports RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format
* types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* Alpha channel.
*
* @param image The `MPPImage` on which face landmark detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing face
* landmark detection on the input image.
* @param image The `MPImage` on which face landmark detection is to be performed.
*
* @return An `MPPFaceLandmarkerResult` that contains a list of landmarks.
* @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an
* error in initializing the face landmarker.
*/
- (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image
error:(NSError **)error NS_SWIFT_NAME(detect(image:));
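Example (Swift): image-mode usage, mirroring the `FaceDetector` sketches (`modelPath` and `mpImage` as before; the `faceLandmarks` property name is inferred from the result docs).

```swift
let faceLandmarker = try FaceLandmarker(modelPath: modelPath)
let result = try faceLandmarker.detect(image: mpImage)
print(result.faceLandmarks.count) // one landmark list per detected face
```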
/**
* Performs face landmark detection on the provided video frame of type `MPPImage` using the whole
* Performs face landmark detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created with
* `MPPRunningModeVideo`.
* the provided `MPImage`. Only use this method when the `FaceLandmarker` is created with
* running mode `.video`.
*
* This method supports RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image The `MPPImage` on which face landmark detection is to be performed.
* @param image The `MPImage` on which face landmark detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing face
* landmark detection on the input image.
*
* @return An `MPPFaceLandmarkerResult` that contains a list of landmarks.
* @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an
* error in initializing the face landmarker.
*/
- (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@ -109,39 +101,36 @@ NS_SWIFT_NAME(FaceLandmarker)
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
/**
* Sends live stream image data of type `MPPImage` to perform face landmark detection using the
* Sends live stream image data of type `MPImage` to perform face landmark detection using the
* whole image as region of interest. Rotation will be applied according to the `orientation`
* property of the provided `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created
* with `MPPRunningModeLiveStream`.
* property of the provided `MPImage`. Only use this method when the `FaceLandmarker` is created
* with `.liveStream`.
*
* The object which needs to be continuously notified of the available results of face
* detection must confirm to `MPPFaceLandmarkerLiveStreamDelegate` protocol and implement the
* `faceLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
* detection must conform to the `FaceLandmarkerLiveStreamDelegate` protocol and implement the
* `faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the face landmarker. The input timestamps must be monotonically increasing.
*
* This method supports RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or
* `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
* space is RGB with an Alpha channel.
* If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* Alpha channel.
*
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property.
*
* @param image A live stream image data of type `MPPImage` on which face landmark detection is to
* be performed.
* @param image A live stream image data of type `MPImage` on which face landmark detection is to be
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the face landmarker. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error when sending the input
* image to the graph.
*
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
* @return `true` if the image was sent to the task successfully, otherwise `false`.
*/
- (BOOL)detectAsyncInImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds

View File

@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPFaceLandmarker;
/**
* This protocol defines an interface for the delegates of `MPPFaceLandmarker` face to receive
* This protocol defines an interface for the delegates of `FaceLandmarker` to receive
* results of performing asynchronous face detection on images (i.e., when `runningMode` =
* `MPPRunningModeLiveStream`).
* `.liveStream`).
*
* The delegate of `MPPFaceLandmarker` must adopt `MPPFaceLandmarkerLiveStreamDelegate` protocol.
* The delegate of `FaceLandmarker` must adopt `FaceLandmarkerLiveStreamDelegate` protocol.
* The methods in this protocol are optional.
*/
NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate)
@ -35,14 +35,14 @@ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate)
/**
* This method notifies a delegate that the results of asynchronous face detection of
* an image submitted to the `MPPFaceLandmarker` is available.
* an image submitted to the `FaceLandmarker` are available.
*
* This method is called on a private serial dispatch queue created by the `MPPFaceLandmarker`
* This method is called on a private serial dispatch queue created by the `FaceLandmarker`
* for performing the asynchronous delegate calls.
*
* @param faceLandmarker The face landmarker which performed the face landmark detections.
* This is useful to test equality when there are multiple instances of `MPPFaceLandmarker`.
* @param result The `MPPFaceLandmarkerResult` object that contains a list of landmarks.
* This is useful to test equality when there are multiple instances of `FaceLandmarker`.
* @param result The `FaceLandmarkerResult` object that contains a list of landmarks.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the face landmarker.
* @param error An optional error parameter populated when there is an error in performing face
@ -55,26 +55,25 @@ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate)
NS_SWIFT_NAME(faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:));
@end
/** Options for setting up a `MPPFaceLandmarker`. */
/** Options for setting up a `FaceLandmarker`. */
NS_SWIFT_NAME(FaceLandmarkerOptions)
@interface MPPFaceLandmarkerOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the face landmark dection task. Defaults to `MPPRunningModeImage`.
* `MPPFaceLandmarker` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of
* input data, such as from the camera.
* Running mode of the face landmark detection task. Defaults to `.image`. `FaceLandmarker` can be
* created with one of the following running modes:
* 1. `.image`: The mode for performing face detection on single image inputs.
* 2. `.video`: The mode for performing face detection on the decoded frames of a video.
* 3. `.liveStream`: The mode for performing face detection on a live stream of input data, such as
* from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* An object that confirms to `MPPFaceLandmarkerLiveStreamDelegate` protocol. This object must
* implement `faceLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
* the results of performing asynchronous face landmark detection on images (i.e, when `runningMode`
* = `MPPRunningModeLiveStream`).
* An object that conforms to the `FaceLandmarkerLiveStreamDelegate` protocol. This object must
* implement `faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)` to
* receive the results of performing asynchronous face landmark detection on images (i.e., when
* `runningMode` = `.liveStream`).
*/
@property(nonatomic, weak, nullable) id<MPPFaceLandmarkerLiveStreamDelegate>
faceLandmarkerLiveStreamDelegate;
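Example (Swift): selecting a running mode from the list above before construction.

```swift
let options = FaceLandmarkerOptions()
options.runningMode = .video // or .image / .liveStream
let landmarker = try FaceLandmarker(options: options)
```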

View File

@ -54,7 +54,7 @@ NS_SWIFT_NAME(TransformMatrix)
@end
/** Represents the detection results generated by `MPPFaceLandmarker`. */
/** Represents the detection results generated by `FaceLandmarker`. */
NS_SWIFT_NAME(FaceLandmarkerResult)
@interface MPPFaceLandmarkerResult : MPPTaskResult
@ -72,16 +72,16 @@ NS_SWIFT_NAME(FaceLandmarkerResult)
@property(nonatomic, readonly) NSArray<MPPTransformMatrix *> *facialTransformationMatrixes;
/**
* Initializes a new `MPPFaceLandmarkerResult` with the given array of landmarks, blendshapes,
* Initializes a new `FaceLandmarkerResult` with the given array of landmarks, blendshapes,
* facialTransformationMatrixes and timestamp (in milliseconds).
*
* @param faceLandmarks An array of `MPPNormalizedLandmark` objects.
* @param faceBlendshapes An array of `MPPClassifications` objects.
* @param faceLandmarks An array of `NormalizedLandmark` objects.
* @param faceBlendshapes An array of `Classifications` objects.
* @param facialTransformationMatrixes An array of flattened matrices.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
*
* @return An instance of `MPPFaceLandmarkerResult` initialized with the given array of detections
* and timestamp (in milliseconds).
* @return An instance of `FaceLandmarkerResult` initialized with the given array of detections and
* timestamp (in milliseconds).
*/
- (instancetype)initWithFaceLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)faceLandmarks
faceBlendshapes:(NSArray<MPPClassifications *> *)faceBlendshapes

View File

@ -53,28 +53,24 @@ NS_SWIFT_NAME(ImageClassifier)
@interface MPPImageClassifier : NSObject
/**
* Creates a new instance of `MPPImageClassifier` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPImageClassifierOptions`.
* Creates a new instance of `ImageClassifier` from an absolute path to a TensorFlow Lite model file
* stored locally on the device and the default `ImageClassifierOptions`.
*
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* image classifier.
*
* @return A new instance of `MPPImageClassifier` with the given model path. `nil` if there is an
* @return A new instance of `ImageClassifier` with the given model path. `nil` if there is an
* error in initializing the image classifier.
*/
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/**
* Creates a new instance of `MPPImageClassifier` from the given `MPPImageClassifierOptions`.
* Creates a new instance of `ImageClassifier` from the given `ImageClassifierOptions`.
*
* @param options The options of type `MPPImageClassifierOptions` to use for configuring the
* `MPPImageClassifier`.
* @param error An optional error parameter populated when there is an error in initializing the
* image classifier.
* @param options The options of type `ImageClassifierOptions` to use for configuring the
* `ImageClassifier`.
*
* @return A new instance of `MPPImageClassifier` with the given options. `nil` if there is an error
* in initializing the image classifier.
* @return A new instance of `ImageClassifier` with the given options. `nil` if there is an error in
* initializing the image classifier.
*/
- (nullable instancetype)initWithOptions:(MPPImageClassifierOptions *)options
error:(NSError **)error NS_DESIGNATED_INITIALIZER;
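Example (Swift): both construction paths. The `baseOptions.modelAssetPath` and `maxResults` fields are assumptions about the shared task-options surface, not confirmed by this diff.

```swift
// Path-based construction with default options.
let classifier = try ImageClassifier(modelPath: modelPath)

// Options-based construction (field names assumed).
let options = ImageClassifierOptions()
options.baseOptions.modelAssetPath = modelPath
options.maxResults = 3
let configured = try ImageClassifier(options: options)
```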
@ -82,49 +78,46 @@ NS_SWIFT_NAME(ImageClassifier)
/**
* Performs image classification on the provided `MPImage` using the whole image as region of
* interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeImage`.
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* `MPImage`. Only use this method when the `ImageClassifier` is created with running mode
* `.image`.
*
* This method supports classification of RGBA images. If your `MPImage` has a source type
* of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image The `MPImage` on which image classification is to be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input image.
*
* @return An `MPPImageClassifierResult` object that contains a list of image classifications.
* @return An `ImageClassifierResult` object that contains a list of image classifications.
*/
- (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image
error:(NSError **)error
NS_SWIFT_NAME(classify(image:));
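Example (Swift): whole-image classification under `classify(image:)`; the `classificationResult` property name on the result is an assumption based on the container headers above.

```swift
let result = try classifier.classify(image: mpImage)
let topCategory = result.classificationResult.classifications.first?.categories.first
print(topCategory?.categoryName ?? "no prediction")
```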
/**
* Performs image classification on the provided `MPPImage` cropped to the specified region of
* Performs image classification on the provided `MPImage` cropped to the specified region of
* interest. Rotation will be applied on the cropped image according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeImage`.
* of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* running mode `.image`.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image The `MPPImage` on which image classification is to be performed.
* @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
* @param image The `MPImage` on which image classification is to be performed.
* @param roi A `CGRect` specifying the region of interest within the given `MPImage`, on which
* image classification should be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input image.
*
* @return An `MPPImageClassifierResult` object that contains a list of image classifications.
* @return An `ImageClassifierResult` object that contains a list of image classifications.
*/
- (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image
regionOfInterest:(CGRect)roi
@ -132,30 +125,28 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classify(image:regionOfInterest:));
/**
* Performs image classification on the provided video frame of type `MPPImage` using the whole
* Performs image classification on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeVideo`.
* the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* running mode `.video`.
*
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
* be monotonically increasing.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image The `MPImage` on which image classification is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input video frame.
*
* @return An `MPPImageClassifierResult` object that contains a list of image classifications.
* @return An `ImageClassifierResult` object that contains a list of image classifications.
*/
- (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@ -163,33 +154,30 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:));
/**
* Performs image classification on the provided video frame of type `MPPImage` cropped to the
* Performs image classification on the provided video frame of type `MPImage` cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeVideo`.
* the provided `MPImage`. Only use this method when the `ImageClassifier` is created with `.video`.
*
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
* be monotonically increasing.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image A live stream image data of type `MPPImage` on which image classification is to be
* @param image The video frame of type `MPImage` on which image classification is to be
* performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
* @param roi A `CGRect` specifying the region of interest within the video frame of type
* `MPPImage`, on which image classification should be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input video frame.
* `MPImage`, on which image classification should be performed.
*
* @return An `MPPImageClassifierResult` object that contains a list of image classifications.
* @return An `ImageClassifierResult` object that contains a list of image classifications.
*/
- (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@@ -198,40 +186,38 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:));
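Continuing the sketch above, the same call can be limited to a region of interest; the rectangle is an arbitrary example in the unrotated image's coordinate space:

```swift
// Classify only a 320x240 region at the top-left corner of the frame.
let roi = CGRect(x: 0, y: 0, width: 320, height: 240)
let roiResult = try classifier.classify(videoFrame: image,
                                        timestampInMilliseconds: 66,
                                        regionOfInterest: roi)
```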
/**
* Sends live stream image data of type `MPPImage` to perform image classification using the whole
* Sends live stream image data of type `MPImage` to perform image classification using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* `.liveStream`.
*
* The object which needs to be continuously notified of the available results of image
* classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:`
* classification must conform to the `ImageClassifierLiveStreamDelegate` protocol and implement the
* `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)`
* delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
* space is RGB with an Alpha channel.
* If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* Alpha channel.
*
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property.
*
* @param image A live stream image data of type `MPPImage` on which image classification is to be
* @param image Live stream image data of type `MPImage` on which image classification is to be
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the image classifier. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input live stream image data.
*
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
* @return `true` if the image was sent to the task successfully, otherwise `false`.
*/
- (BOOL)classifyAsyncImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
@@ -239,42 +225,40 @@ NS_SWIFT_NAME(ImageClassifier)
NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
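A rough sketch of the live-stream flow described above, assuming a `classifier` created with `.liveStream` and a capture session that is already configured:

```swift
import AVFoundation
import MediaPipeTasksVision

// Request RGBA camera frames, as recommended above.
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.videoSettings =
    [kCVPixelBufferPixelFormatTypeKey as String: kCMPixelFormat_32RGBA]

// Inside the AVCaptureVideoDataOutputSampleBufferDelegate callback:
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
  let seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
  guard let image = try? MPImage(sampleBuffer: sampleBuffer) else { return }
  // Returns as soon as the frame is queued; results arrive on the delegate.
  try? classifier.classifyAsync(image: image,
                                timestampInMilliseconds: Int(seconds * 1000))
}
```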
/**
* Sends live stream image data of type ``MPPImage`` to perform image classification, cropped to the
* Sends live stream image data of type `MPImage` to perform image classification, cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with
* `MPPRunningModeLiveStream`.
* of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with
* `.liveStream`.
*
* The object which needs to be continuously notified of the available results of image
* classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the
* `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate
* classification must conform to the `ImageClassifierLiveStreamDelegate` protocol and implement the
* `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` delegate
* method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the image classifier. The input timestamps must be monotonically increasing.
*
* This method supports classification of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports classification of RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
* space is RGB with an Alpha channel.
* If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* Alpha channel.
*
* If this method is used for classifying live camera frames using `AVFoundation`, ensure that you
* request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
* `videoSettings` property.
*
* @param image A live stream image data of type `MPPImage` on which image classification is to be
* @param image Live stream image data of type `MPImage` on which image classification is to be
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the image classifier. The input timestamps must be monotonically increasing.
* @param roi A `CGRect` specifying the region of interest within the given live stream image data
* of type `MPPImage`, on which image classification should be performed.
* @param error An optional error parameter populated when there is an error in performing image
* classification on the input live stream image data.
* of type `MPImage`, on which image classification should be performed.
*
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
* @return `true` if the image was sent to the task successfully, otherwise `false`.
*/
- (BOOL)classifyAsyncImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds

View File

@@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPImageClassifier;
/**
* This protocol defines an interface for the delegates of `MPPImageClassifier` object to receive
* This protocol defines an interface for the delegates of the `ImageClassifier` object to receive
* results of asynchronous classification of images (i.e., when `runningMode =
* MPPRunningModeLiveStream`).
* .liveStream`).
*
* The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierLiveStreamDelegate` protocol.
* The delegate of `ImageClassifier` must adopt `ImageClassifierLiveStreamDelegate` protocol.
* The methods in this protocol are optional.
*/
NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
@@ -36,14 +36,14 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
@optional
/**
* This method notifies a delegate that the results of asynchronous classification of
* an image submitted to the `MPPImageClassifier` is available.
* an image submitted to the `ImageClassifier` are available.
*
* This method is called on a private serial queue created by the `MPPImageClassifier`
* This method is called on a private serial queue created by the `ImageClassifier`
* for performing the asynchronous delegate calls.
*
* @param imageClassifier The image classifier which performed the classification.
* This is useful to test equality when there are multiple instances of `MPPImageClassifier`.
* @param result An `MPPImageClassifierResult` object that contains a list of image classifications.
* This is useful to test equality when there are multiple instances of `ImageClassifier`.
* @param result An `ImageClassifierResult` object that contains a list of image classifications.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image was sent to the image classifier.
* @param error An optional error parameter populated when there is an error in performing image
@@ -57,27 +57,27 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate)
@end
/**
* Options for setting up a `MPPImageClassifier`.
* Options for setting up an `ImageClassifier`.
*/
NS_SWIFT_NAME(ImageClassifierOptions)
@interface MPPImageClassifierOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the image classifier task. Defaults to `MPPRunningModeImage`.
* `MPPImageClassifier` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing classification on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing classification on the decoded frames of a
* Running mode of the image classifier task. Defaults to `.image`.
* `ImageClassifier` can be created with one of the following running modes:
* 1. `.image`: The mode for performing classification on single image inputs.
* 2. `.video`: The mode for performing classification on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing classification on a live stream of input
* 3. `.liveStream`: The mode for performing classification on a live stream of input
* data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* An object that confirms to `MPPImageClassifierLiveStreamDelegate` protocol. This object must
* implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
* the results of asynchronous classification on images (i.e, when `runningMode =
* MPPRunningModeLiveStream`).
* An object that conforms to the `ImageClassifierLiveStreamDelegate` protocol. This object must
* implement `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` to
* receive the results of asynchronous classification on images (i.e., when `runningMode =
* .liveStream`).
*/
@property(nonatomic, weak, nullable) id<MPPImageClassifierLiveStreamDelegate>
imageClassifierLiveStreamDelegate;
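A sketch of wiring these options to a delegate; `ClassificationHandler` and the model path are hypothetical, and the delegate method is named as the docs above reference it:

```swift
import MediaPipeTasksVision

class ClassificationHandler: NSObject, ImageClassifierLiveStreamDelegate {
  var classifier: ImageClassifier?

  func setUp() throws {
    let options = ImageClassifierOptions()
    options.baseOptions.modelAssetPath = "classifier.task"  // assumed path
    options.runningMode = .liveStream
    options.imageClassifierLiveStreamDelegate = self
    classifier = try ImageClassifier(options: options)
  }

  func imageClassifier(_ imageClassifier: ImageClassifier,
                       didFinishClassificationWithResult result: ImageClassifierResult?,
                       timestampInMilliseconds: Int,
                       error: Error?) {
    // Called on the classifier's private serial queue; hop to the main
    // queue before touching UI state.
    DispatchQueue.main.async { /* consume result */ }
  }
}
```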

View File

@@ -18,23 +18,23 @@
NS_ASSUME_NONNULL_BEGIN
/** Represents the classification results generated by `MPPImageClassifier`. **/
/** Represents the classification results generated by `ImageClassifier`. **/
NS_SWIFT_NAME(ImageClassifierResult)
@interface MPPImageClassifierResult : MPPTaskResult
/** The `MPPClassificationResult` instance containing one set of results per classifier head. **/
/** The `ClassificationResult` instance containing one set of results per classifier head. **/
@property(nonatomic, readonly) MPPClassificationResult *classificationResult;
/**
* Initializes a new `MPPImageClassifierResult` with the given `MPPClassificationResult` and
* Initializes a new `ImageClassifierResult` with the given `ClassificationResult` and
* timestamp (in milliseconds).
*
* @param classificationResult The `MPPClassificationResult` instance containing one set of results
* @param classificationResult The `ClassificationResult` instance containing one set of results
* per classifier head.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
*
* @return An instance of `MPPImageClassifierResult` initialized with the given
* `MPPClassificationResult` and timestamp (in milliseconds).
* @return An instance of `ImageClassifierResult` initialized with the given
* `ClassificationResult` and timestamp (in milliseconds).
*/
- (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
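For example, the per-head results carried by an `ImageClassifierResult` could be read along these lines:

```swift
// `result` is an ImageClassifierResult returned by one of the classify calls.
for head in result.classificationResult.classifications {
  let name = head.headName ?? "head \(head.headIndex)"
  if let top = head.categories.first {
    print("\(name): \(top.categoryName ?? "unknown") score=\(top.score)")
  }
}
```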

View File

@@ -64,52 +64,50 @@ NS_SWIFT_NAME(ObjectDetector)
@interface MPPObjectDetector : NSObject
/**
* Creates a new instance of `MPPObjectDetector` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `MPPObjectDetector`.
* Creates a new instance of `ObjectDetector` from an absolute path to a TensorFlow Lite model
* file stored locally on the device and the default `ObjectDetectorOptions`.
*
* @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
* @param error An optional error parameter populated when there is an error in initializing the
* object detector.
*
* @return A new instance of `MPPObjectDetector` with the given model path. `nil` if there is an
* @return A new instance of `ObjectDetector` with the given model path. `nil` if there is an
* error in initializing the object detector.
*/
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/**
* Creates a new instance of `MPPObjectDetector` from the given `MPPObjectDetectorOptions`.
* Creates a new instance of `ObjectDetector` from the given `ObjectDetectorOptions`.
*
* @param options The options of type `MPPObjectDetectorOptions` to use for configuring the
* `MPPObjectDetector`.
* @param options The options of type `ObjectDetectorOptions` to use for configuring the
* `ObjectDetector`.
* @param error An optional error parameter populated when there is an error in initializing the
* object detector.
*
* @return A new instance of `MPPObjectDetector` with the given options. `nil` if there is an error
* @return A new instance of `ObjectDetector` with the given options. `nil` if there is an error
* in initializing the object detector.
*/
- (nullable instancetype)initWithOptions:(MPPObjectDetectorOptions *)options
error:(NSError **)error NS_DESIGNATED_INITIALIZER;
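A sketch of both initializers; "detector.task" stands in for a real model file:

```swift
import MediaPipeTasksVision

// Convenience form: a model path plus default options.
let detector = try ObjectDetector(modelPath: "detector.task")

// Configured form: explicit options.
let options = ObjectDetectorOptions()
options.baseOptions.modelAssetPath = "detector.task"
options.runningMode = .image
let configuredDetector = try ObjectDetector(options: options)
```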
/**
* Performs object detection on the provided MPPImage using the whole image as region of
* Performs object detection on the provided `MPImage` using the whole
* interest. Rotation will be applied according to the `orientation` property of the provided
* `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeImage`.
* `MPImage`. Only use this method when the `ObjectDetector` is created with
* `.image`.
*
* This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* If your `MPImage` has a source type of `.image` ensure that the color space is
* RGB with an Alpha channel.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
* @param image The `MPImage` on which object detection is to be performed.
*
* @return An `MPPObjectDetectorResult` object that contains a list of detections, each detection
* @return An `ObjectDetectorResult` object that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data.
@@ -118,27 +116,25 @@ NS_SWIFT_NAME(ObjectDetector)
error:(NSError **)error NS_SWIFT_NAME(detect(image:));
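For illustration, single-image detection might look like this, where `photo` is an assumed `UIImage`:

```swift
let image = try MPImage(uiImage: photo)
let result = try detector.detect(image: image)

// Bounding boxes are in the unrotated input's pixel coordinate system.
for detection in result.detections {
  let label = detection.categories.first?.categoryName ?? "unknown"
  print("\(label): \(detection.boundingBox)")
}
```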
/**
* Performs object detection on the provided video frame of type `MPPImage` using the whole
* Performs object detection on the provided video frame of type `MPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeVideo`.
* the provided `MPImage`. Only use this method when the `ObjectDetector` is created with
* `.video`.
*
* This method supports detecting objects in of RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
* RGB with an Alpha channel.
* If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
* channel.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param image The `MPImage` on which object detection is to be performed.
* @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
* timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
*
* @return An `MPPObjectDetectorResult` object that contains a list of detections, each detection
* @return An `ObjectDetectorResult` object that contains a list of detections, each detection
* has a bounding box that is expressed in the unrotated input frame of reference coordinates
* system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying
* image data.
@@ -149,26 +145,26 @@ NS_SWIFT_NAME(ObjectDetector)
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
/**
* Sends live stream image data of type `MPPImage` to perform object detection using the whole
* Sends live stream image data of type `MPImage` to perform object detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeLiveStream`.
* the provided `MPImage`. Only use this method when the `ObjectDetector` is created with
* `.liveStream`.
*
* The object which needs to be continuously notified of the available results of object
* detection must confirm to `MPPObjectDetectorLiveStreamDelegate` protocol and implement the
* `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method.
* detection must conform to the `ObjectDetectorLiveStreamDelegate` protocol and implement the
* `objectDetector(_:didFinishDetection:timestampInMilliseconds:error:)` delegate method.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing.
*
* This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of
* `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
* must have one of the following pixel format types:
* This method supports detecting objects in RGBA images. If your `MPImage` has a source type of
* `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
* pixel format types:
* 1. kCVPixelFormatType_32BGRA
* 2. kCVPixelFormatType_32RGBA
*
* If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
* space is RGB with an Alpha channel.
* If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
* Alpha channel.
*
* If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure
* that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
@@ -178,10 +174,8 @@ NS_SWIFT_NAME(ObjectDetector)
* performed.
* @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
* image is sent to the object detector. The input timestamps must be monotonically increasing.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input live stream image data.
*
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
* @return `true` if the image was sent to the task successfully, otherwise `false`.
*/
- (BOOL)detectAsyncInImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
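Assuming this method maps to Swift as `detectAsync(image:timestampInMilliseconds:)`, a per-frame call might be sketched as:

```swift
// Inside the AVCaptureVideoDataOutputSampleBufferDelegate callback.
let seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
if let image = try? MPImage(sampleBuffer: sampleBuffer) {
  // Success only means the frame was accepted; results arrive on the delegate.
  try? detector.detectAsync(image: image,
                            timestampInMilliseconds: Int(seconds * 1000))
}
```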

View File

@@ -81,8 +81,7 @@ static NSString *const kTaskName = @"objectDetector";
}
MPPObjectDetectorResult *result = [MPPObjectDetectorResult
objectDetectorResultWithDetectionsPacket:
outputPacketMap[kDetectionsStreamName.cppString]];
objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]];
NSInteger timeStampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /

View File

@@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN
@class MPPObjectDetector;
/**
* This protocol defines an interface for the delegates of `MPPObjectDetector` object to receive
* This protocol defines an interface for the delegates of the `ObjectDetector` object to receive
* results of performing asynchronous object detection on images (i.e., when `runningMode` =
* `MPPRunningModeLiveStream`).
* `.liveStream`).
*
* The delegate of `MPPObjectDetector` must adopt `MPPObjectDetectorLiveStreamDelegate` protocol.
* The delegate of `ObjectDetector` must adopt `ObjectDetectorLiveStreamDelegate` protocol.
* The methods in this protocol are optional.
*/
NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
@@ -37,14 +37,14 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
/**
* This method notifies a delegate that the results of asynchronous object detection of
* an image submitted to the `MPPObjectDetector` is available.
* an image submitted to the `ObjectDetector` are available.
*
* This method is called on a private serial dispatch queue created by the `MPPObjectDetector`
* This method is called on a private serial dispatch queue created by the `ObjectDetector`
* for performing the asynchronous delegate calls.
*
* @param objectDetector The object detector which performed the object detection.
* This is useful to test equality when there are multiple instances of `MPPObjectDetector`.
* @param result The `MPPObjectDetectorResult` object that contains a list of detections, each
* This is useful to test equality when there are multiple instances of `ObjectDetector`.
* @param result The `ObjectDetectorResult` object that contains a list of detections, each
* detection has a bounding box that is expressed in the unrotated input frame of reference
* coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
* underlying image data.
@@ -60,26 +60,27 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate)
NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:));
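Matching the `NS_SWIFT_NAME` above, a conforming delegate might be sketched as follows; `CameraController` is a hypothetical class owning the detector:

```swift
extension CameraController: ObjectDetectorLiveStreamDelegate {
  func objectDetector(_ objectDetector: ObjectDetector,
                      didFinishDetection result: ObjectDetectorResult?,
                      timestampInMilliseconds: Int,
                      error: Error?) {
    guard error == nil, let result = result else { return }
    // Delivered on a private serial queue; dispatch before updating UI.
    DispatchQueue.main.async { /* overlay result.detections */ }
  }
}
```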
@end
/** Options for setting up a `MPPObjectDetector`. */
/** Options for setting up an `ObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorOptions)
@interface MPPObjectDetectorOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the object detector task. Defaults to `MPPRunningModeImage`.
* `MPPObjectDetector` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing object detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing object detection on the decoded frames of a
* Running mode of the object detector task. Defaults to `.image`.
* `ObjectDetector` can be created with one of the following running modes:
* 1. `.image`: The mode for performing object detection on single image inputs.
* 2. `.video`: The mode for performing object detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing object detection on a live stream of
* 3. `.liveStream`: The mode for performing object detection on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/**
* An object that confirms to `MPPObjectDetectorLiveStreamDelegate` protocol. This object must
* implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive
* the results of performing asynchronous object detection on images (i.e, when `runningMode` =
* `MPPRunningModeLiveStream`).
* An object that conforms to the `ObjectDetectorLiveStreamDelegate` protocol. This object must
* implement `objectDetector(_:didFinishDetection:timestampInMilliseconds:error:)` to receive the
* results of performing asynchronous object detection on images (i.e., when `runningMode` =
* `.liveStream`).
*/
@property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate>
objectDetectorLiveStreamDelegate;
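Putting these options together for live-stream detection (a sketch; the model path is assumed and `self` must conform to `ObjectDetectorLiveStreamDelegate`):

```swift
let options = ObjectDetectorOptions()
options.baseOptions.modelAssetPath = "detector.task"
options.runningMode = .liveStream
options.objectDetectorLiveStreamDelegate = self
let liveDetector = try ObjectDetector(options: options)
```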

View File

@@ -18,27 +18,27 @@
NS_ASSUME_NONNULL_BEGIN
/** Represents the detection results generated by `MPPObjectDetector`. */
/** Represents the detection results generated by `ObjectDetector`. */
NS_SWIFT_NAME(ObjectDetectorResult)
@interface MPPObjectDetectorResult : MPPTaskResult
/**
* The array of `MPPDetection` objects each of which has a bounding box that is expressed in the
* The array of `Detection` objects each of which has a bounding box that is expressed in the
* unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x
* [0,image_height)`, which are the dimensions of the underlying image data.
*/
@property(nonatomic, readonly) NSArray<MPPDetection *> *detections;
/**
* Initializes a new `MPPObjectDetectorResult` with the given array of detections and timestamp (in
* Initializes a new `ObjectDetectorResult` with the given array of detections and timestamp (in
* milliseconds).
*
* @param detections An array of `MPPDetection` objects each of which has a bounding box that is
* @param detections An array of `Detection` objects each of which has a bounding box that is
* expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
* x [0,image_height)`, which are the dimensions of the underlying image data.
* @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
*
* @return An instance of `MPPObjectDetectorResult` initialized with the given array of detections
* @return An instance of `ObjectDetectorResult` initialized with the given array of detections
* and timestamp (in milliseconds).
*/
- (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections