diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h
index 65136dc83..458508718 100644
--- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h
+++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h
@@ -31,51 +31,52 @@ NS_SWIFT_NAME(GestureRecognizer)
 @interface MPPGestureRecognizer : NSObject

 /**
- * Creates a new instance of `MPPGestureRecognizer` from an absolute path to a TensorFlow Lite model
- * file stored locally on the device and the default `MPPGestureRecognizerOptions`.
+ * Creates a new instance of `GestureRecognizer` from an absolute path to a TensorFlow Lite model
+ * file stored locally on the device and the default `GestureRecognizerOptions`.
  *
  * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
  * @param error An optional error parameter populated when there is an error in initializing the
  * gesture recognizer.
  *
- * @return A new instance of `MPPGestureRecognizer` with the given model path. `nil` if there is an
+ * @return A new instance of `GestureRecognizer` with the given model path. `nil` if there is an
  * error in initializing the gesture recognizer.
  */
 - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;

 /**
- * Creates a new instance of `MPPGestureRecognizer` from the given `MPPGestureRecognizerOptions`.
+ * Creates a new instance of `GestureRecognizer` from the given `GestureRecognizerOptions`.
  *
- * @param options The options of type `MPPGestureRecognizerOptions` to use for configuring the
- * `MPPGestureRecognizer`.
+ * @param options The options of type `GestureRecognizerOptions` to use for configuring the
+ * `GestureRecognizer`.
  * @param error An optional error parameter populated when there is an error in initializing the
  * gesture recognizer.
  *
- * @return A new instance of `MPPGestureRecognizer` with the given options. `nil` if there is an
- * error in initializing the gesture recognizer.
+ * @return A new instance of `GestureRecognizer` with the given options. `nil` if there is an error
+ * in initializing the gesture recognizer.
  */
 - (nullable instancetype)initWithOptions:(MPPGestureRecognizerOptions *)options error:(NSError **)error NS_DESIGNATED_INITIALIZER;

 /**
- * Performs gesture recognition on the provided MPPImage using the whole image as region of
+ * Performs gesture recognition on the provided `MPImage` using the whole image as region of
  * interest. Rotation will be applied according to the `orientation` property of the provided
- * `MPPImage`. Only use this method when the `MPPGestureRecognizer` is created with
- * `MPPRunningModeImage`.
- * This method supports gesture recognition of RGBA images. If your `MPPImage` has a source type of
- * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
- * must have one of the following pixel format types:
+ * `MPImage`. Only use this method when the `GestureRecognizer` is created with running mode
+ * `.image`.
+ *
+ * This method supports gesture recognition of RGBA images. If your `MPImage` has a source type of
+ * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
+ * pixel format types:
  * 1. kCVPixelFormatType_32BGRA
  * 2. kCVPixelFormatType_32RGBA
  *
- * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
- * RGB with an Alpha channel.
+ * If your `MPImage` has a source type of `.image`, ensure that the color space is RGB with an Alpha
+ * channel.
  *
- * @param image The `MPPImage` on which gesture recognition is to be performed.
+ * @param image The `MPImage` on which gesture recognition is to be performed.
  * @param error An optional error parameter populated when there is an error in performing gesture
  * recognition on the input image.
  *
- * @return An `MPPGestureRecognizerResult` object that contains the hand gesture recognition
+ * @return A `GestureRecognizerResult` object that contains the hand gesture recognition
  * results.
  */
 - (nullable MPPGestureRecognizerResult *)recognizeImage:(MPPImage *)image
@@ -83,30 +84,30 @@ NS_SWIFT_NAME(GestureRecognizer)
     NS_SWIFT_NAME(recognize(image:));

 /**
- * Performs gesture recognition on the provided video frame of type `MPPImage` using the whole
- * image as region of interest. Rotation will be applied according to the `orientation` property of
- * the provided `MPPImage`. Only use this method when the `MPPGestureRecognizer` is created with
- * `MPPRunningModeVideo`.
+ * Performs gesture recognition on the provided video frame of type `MPImage` using the whole image
+ * as region of interest. Rotation will be applied according to the `orientation` property of the
+ * provided `MPImage`. Only use this method when the `GestureRecognizer` is created with running
+ * mode `.video`.
  *
  * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
  * be monotonically increasing.
  *
- * This method supports gesture recognition of RGBA images. If your `MPPImage` has a source type of
- * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
- * must have one of the following pixel format types:
+ * This method supports gesture recognition of RGBA images. If your `MPImage` has a source type of
+ * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
+ * pixel format types:
  * 1. kCVPixelFormatType_32BGRA
  * 2. kCVPixelFormatType_32RGBA
  *
- * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
- * RGB with an Alpha channel.
+ * If your `MPImage` has a source type of `.image`, ensure that the color space is RGB with an Alpha
+ * channel.
  *
- * @param image The `MPPImage` on which gesture recognition is to be performed.
+ * @param image The `MPImage` on which gesture recognition is to be performed.
  * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
  * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing gesture
  * recognition on the input video frame.
  *
- * @return An `MPPGestureRecognizerResult` object that contains the hand gesture recognition
+ * @return A `GestureRecognizerResult` object that contains the hand gesture recognition
  * results.
  */
 - (nullable MPPGestureRecognizerResult *)recognizeVideoFrame:(MPPImage *)image
@@ -115,33 +116,33 @@ NS_SWIFT_NAME(GestureRecognizer)
     NS_SWIFT_NAME(recognize(videoFrame:timestampInMilliseconds:));

 /**
- * Sends live stream image data of type `MPPImage` to perform gesture recognition using the whole
+ * Sends live stream image data of type `MPImage` to perform gesture recognition using the whole
  * image as region of interest. Rotation will be applied according to the `orientation` property of
- * the provided `MPPImage`. Only use this method when the `MPPGestureRecognizer` is created with
- * `MPPRunningModeLiveStream`.
+ * the provided `MPImage`. Only use this method when the `GestureRecognizer` is created with running
+ * mode `.liveStream`.
  *
  * The object which needs to be continuously notified of the available results of gesture
- * recognition must confirm to `MPPGestureRecognizerLiveStreamDelegate` protocol and implement the
- * `gestureRecognizer:didFinishRecognitionWithResult:timestampInMilliseconds:error:`
+ * recognition must conform to the `GestureRecognizerLiveStreamDelegate` protocol and implement the
+ * `gestureRecognizer(_:didFinishRecognitionWithResult:timestampInMilliseconds:error:)`
  * delegate method.
  *
  * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
  * to the gesture recognizer. The input timestamps must be monotonically increasing.
  *
- * This method supports gesture recognition of RGBA images. If your `MPPImage` has a source type of
- * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
- * must have one of the following pixel format types:
+ * This method supports gesture recognition of RGBA images. If your `MPImage` has a source type of
+ * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following
+ * pixel format types:
  * 1. kCVPixelFormatType_32BGRA
  * 2. kCVPixelFormatType_32RGBA
  *
- * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
- * space is RGB with an Alpha channel.
+ * If the input `MPImage` has a source type of `.image`, ensure that the color space is RGB with an
+ * Alpha channel.
  *
  * If this method is used for performing gesture recognition on live camera frames using
  * `AVFoundation`, ensure that you request `AVCaptureVideoDataOutput` to output frames in
  * `kCMPixelFormat_32RGBA` using its `videoSettings` property.
  *
- * @param image A live stream image data of type `MPPImage` on which gesture recognition is to be
+ * @param image Live stream image data of type `MPImage` on which gesture recognition is to be
  * performed.
  * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
  * image is sent to the gesture recognizer. The input timestamps must be monotonically increasing.
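Reviewer note: a minimal Swift sketch of how the renamed image-mode API above reads at a call site. The helper name and model path below are illustrative only and not part of this change; it assumes MediaPipeTasksVision is linked and a `gesture_recognizer.task` model is bundled with the app.

import MediaPipeTasksVision
import UIKit

// Illustrative helper (not part of this diff): single-image recognition with the
// Swift-facing names documented above.
func recognizeGesture(in photo: UIImage) throws -> GestureRecognizerResult {
  let options = GestureRecognizerOptions()
  // Placeholder model location; any bundled .task model path works here.
  options.baseOptions.modelAssetPath =
      Bundle.main.path(forResource: "gesture_recognizer", ofType: "task") ?? ""
  options.runningMode = .image

  // GestureRecognizer(options:) wraps -initWithOptions:error: and throws on failure.
  let recognizer = try GestureRecognizer(options: options)

  // MPImage carries the orientation that the task applies before recognition.
  let image = try MPImage(uiImage: photo)
  return try recognizer.recognize(image: image)
}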
diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h
index 323763cb5..ffc3be289 100644
--- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h
+++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h
@@ -24,12 +24,12 @@ NS_ASSUME_NONNULL_BEGIN
 @class MPPGestureRecognizer;

 /**
- * This protocol defines an interface for the delegates of `MPPGestureRecognizer` object to receive
+ * This protocol defines an interface for the delegates of a `GestureRecognizer` object to receive
  * results of performing asynchronous gesture recognition on images (i.e, when `runningMode` =
- * `MPPRunningModeLiveStream`).
+ * `.liveStream`).
  *
- * The delegate of `MPPGestureRecognizer` must adopt `MPPGestureRecognizerLiveStreamDelegate`
- * protocol. The methods in this protocol are optional.
+ * The delegate of `GestureRecognizer` must adopt the `GestureRecognizerLiveStreamDelegate`
+ * protocol. The methods in this protocol are optional.
  */
 NS_SWIFT_NAME(GestureRecognizerLiveStreamDelegate)
 @protocol MPPGestureRecognizerLiveStreamDelegate <NSObject>
@@ -37,15 +37,15 @@ NS_SWIFT_NAME(GestureRecognizerLiveStreamDelegate)
 @optional

 /**
- * This method notifies a delegate that the results of asynchronous gesture recognition of
- * an image submitted to the `MPPGestureRecognizer` is available.
+ * This method notifies a delegate that the results of asynchronous gesture recognition of an image
+ * submitted to the `GestureRecognizer` are available.
  *
- * This method is called on a private serial dispatch queue created by the `MPPGestureRecognizer`
- * for performing the asynchronous delegates calls.
+ * This method is called on a private serial dispatch queue created by the `GestureRecognizer` for
+ * performing the asynchronous delegate calls.
  *
- * @param gestureRecognizer The gesture recognizer which performed the gesture recognition.
- * This is useful to test equality when there are multiple instances of `MPPGestureRecognizer`.
- * @param result The `MPPGestureRecognizerResult` object that contains a list of detections, each
+ * @param gestureRecognizer The gesture recognizer which performed the gesture recognition. This is
+ * useful to test equality when there are multiple instances of `GestureRecognizer`.
+ * @param result The `GestureRecognizerResult` object that contains a list of detections, each
  * detection has a bounding box that is expressed in the unrotated input frame of reference
  * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the
  * underlying image data.
@@ -62,26 +62,25 @@ NS_SWIFT_NAME(GestureRecognizerLiveStreamDelegate)
     NS_SWIFT_NAME(gestureRecognizer(_:didFinishGestureRecognition:timestampInMilliseconds:error:));

 @end

-/** Options for setting up a `MPPGestureRecognizer`. */
+/** Options for setting up a `GestureRecognizer`. */
 NS_SWIFT_NAME(GestureRecognizerOptions)
 @interface MPPGestureRecognizerOptions : MPPTaskOptions

 /**
- * Running mode of the gesture recognizer task. Defaults to `MPPRunningModeImage`.
- * `MPPGestureRecognizer` can be created with one of the following running modes:
- * 1. `MPPRunningModeImage`: The mode for performing gesture recognition on single image inputs.
- * 2. `MPPRunningModeVideo`: The mode for performing gesture recognition on the decoded frames of a
- * video.
- * 3. `MPPRunningModeLiveStream`: The mode for performing gesture recognition on a live stream of
- * input data, such as from the camera.
+ * Running mode of the gesture recognizer task. Defaults to `.image`.
+ * `GestureRecognizer` can be created with one of the following running modes:
+ * 1. `image`: The mode for performing gesture recognition on single image inputs.
+ * 2. `video`: The mode for performing gesture recognition on the decoded frames of a video.
+ * 3. `liveStream`: The mode for performing gesture recognition on a live stream of input data,
+ * such as from the camera.
  */
 @property(nonatomic) MPPRunningMode runningMode;

 /**
- * An object that confirms to `MPPGestureRecognizerLiveStreamDelegate` protocol. This object must
- * implement `gestureRecognizer:didFinishRecognitionWithResult:timestampInMilliseconds:error:` to
+ * An object that conforms to the `GestureRecognizerLiveStreamDelegate` protocol. This object must
+ * implement `gestureRecognizer(_:didFinishRecognitionWithResult:timestampInMilliseconds:error:)` to
  * receive the results of performing asynchronous gesture recognition on images (i.e, when
- * `runningMode` = `MPPRunningModeLiveStream`).
+ * `runningMode` = `.liveStream`).
  */
 @property(nonatomic, weak, nullable) id<MPPGestureRecognizerLiveStreamDelegate> gestureRecognizerLiveStreamDelegate;
@@ -99,18 +98,18 @@ NS_SWIFT_NAME(GestureRecognizerOptions)
 @property(nonatomic) float minTrackingConfidence;

 /**
- * Sets the optional `MPPClassifierOptions` controlling the canned gestures classifier, such as
- * score threshold, allow list and deny list of gestures. The categories for canned gesture
- * classifiers are: ["None", "Closed_Fist", "Open_Palm", "Pointing_Up", "Thumb_Down", "Thumb_Up",
- * "Victory", "ILoveYou"].
+ * Sets the optional `ClassifierOptions` controlling the canned gestures classifier, such as score
+ * threshold, allow list and deny list of gestures. The categories for canned gesture classifiers
+ * are: ["None", "Closed_Fist", "Open_Palm", "Pointing_Up", "Thumb_Down", "Thumb_Up", "Victory",
+ * "ILoveYou"].
  *
  * TODO: Note this option is subject to change, after scoring merging calculator is implemented.
  */
 @property(nonatomic, copy, nullable) MPPClassifierOptions *cannedGesturesClassifierOptions;

 /**
- * Sets the optional {@link ClassifierOptions} controlling the custom gestures classifier, such as
- * score threshold, allow list and deny list of gestures.
+ * Sets the optional `ClassifierOptions` controlling the custom gestures classifier, such as score
+ * threshold, allow list and deny list of gestures.
  *
  * TODO: Note this option is subject to change, after scoring merging calculator is implemented.
  */
diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h
index d56df2855..278ae89b6 100644
--- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h
+++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h
@@ -20,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN

-/** Represents the gesture recognition results generated by MPPGestureRecognizer. */
+/** Represents the gesture recognition results generated by `GestureRecognizer`. */
 NS_SWIFT_NAME(GestureRecognizerResult)
 @interface MPPGestureRecognizerResult : MPPTaskResult
@@ -41,7 +41,7 @@ NS_SWIFT_NAME(GestureRecognizerResult)
 @property(nonatomic, readonly) NSArray<NSArray<MPPCategory *> *> *gestures;

 /**
- * Initializes a new `MPPGestureRecognizerResult` with the given landmarks, world landmarks,
+ * Initializes a new `GestureRecognizerResult` with the given landmarks, world landmarks,
  * handedness, gestures and timestamp (in milliseconds).
  *
  * @param landmarks The hand landmarks of detected hands.
@@ -50,7 +50,7 @@ NS_SWIFT_NAME(GestureRecognizerResult)
  * @param handedness The recognized hand gestures of detected hands.
  * @param timestampInMilliseconds The timestamp for this result.
  *
- * @return An instance of `MPPGestureRecognizerResult` initialized with the given landmarks, world
+ * @return An instance of `GestureRecognizerResult` initialized with the given landmarks, world
  * landmarks, handedness and gestures.
  *
  */
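Reviewer note: a sketch of the live-stream wiring that MPPGestureRecognizerOptions.h and the delegate protocol above describe. The wrapper class is hypothetical, camera capture is omitted, and the Swift spelling of the async send call (`recognizeAsync(image:timestampInMilliseconds:)`) is assumed, since that part of MPPGestureRecognizer.h is not shown in this diff; the delegate signature follows the NS_SWIFT_NAME visible above.

import MediaPipeTasksVision

// Hypothetical wrapper showing only the options/delegate wiring for `.liveStream` mode.
final class GestureStreamHandler: NSObject, GestureRecognizerLiveStreamDelegate {
  private var recognizer: GestureRecognizer?

  func start(modelPath: String) throws {
    let options = GestureRecognizerOptions()
    options.baseOptions.modelAssetPath = modelPath
    options.runningMode = .liveStream
    // Results arrive on a private serial queue created by the task, via the delegate below.
    options.gestureRecognizerLiveStreamDelegate = self
    recognizer = try GestureRecognizer(options: options)
  }

  func send(frame: MPImage, timestampInMilliseconds: Int) throws {
    // Assumed Swift name of the async send method; timestamps must increase monotonically.
    try recognizer?.recognizeAsync(image: frame, timestampInMilliseconds: timestampInMilliseconds)
  }

  // Matches NS_SWIFT_NAME(gestureRecognizer(_:didFinishGestureRecognition:timestampInMilliseconds:error:)).
  func gestureRecognizer(_ gestureRecognizer: GestureRecognizer,
                         didFinishGestureRecognition result: GestureRecognizerResult?,
                         timestampInMilliseconds: Int,
                         error: Error?) {
    guard error == nil, let result = result else { return }
    // `gestures` holds one ranked list of categories per detected hand.
    for handGestures in result.gestures {
      if let top = handGestures.first {
        print("Top gesture: \(top.categoryName ?? "unknown") score: \(top.score)")
      }
    }
  }
}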