diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h b/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h index 5753c4d3f..61c9e832f 100644 --- a/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h +++ b/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h @@ -44,15 +44,15 @@ NS_SWIFT_NAME(ResultCategory) @property(nonatomic, readonly, nullable) NSString *displayName; /** - * Initializes a new `Category` with the given index, score, category name and display name. + * Initializes a new `ResultCategory` with the given index, score, category name and display name. * * @param index The index of the label in the corresponding label file. * @param score The probability score of this label category. * @param categoryName The label of this category object. * @param displayName The display name of the label. * - * @return An instance of `Category` initialized with the given index, score, category name and - * display name. + * @return An instance of `ResultCategory` initialized with the given index, score, category name + * and display name. */ - (instancetype)initWithIndex:(NSInteger)index score:(float)score diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h index 3dec361a6..8adb40679 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h @@ -80,10 +80,9 @@ NS_SWIFT_NAME(FaceDetector) error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Performs face detection on the provided MPPImage using the whole image as region of + * Performs face detection on the provided `MPImage` using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPImage`. Only use this method when the `MPPFaceDetector` is created with running mode - * `.image`. + * `MPImage`. Only use this method when the `FaceDetector` is created with running mode `.image`. * * This method supports classification of RGBA images. If your `MPImage` has a source type of * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h index 6c5c37512..ce4e991dd 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h @@ -44,7 +44,7 @@ NS_SWIFT_NAME(FaceLandmarker) * Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`. * * @param options The options of type `FaceLandmarkerOptions` to use for configuring the - * `MPPFaceLandmarker`. + * `FaceLandmarker`. * * @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error * in initializing the face landmaker. @@ -53,11 +53,11 @@ NS_SWIFT_NAME(FaceLandmarker) error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Performs face landmark detection on the provided MPPImage using the whole image as region of + * Performs face landmark detection on the provided `MPImage` using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided * `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`. * - * This method supports RGBA images. 
If your `MPPImage` has a source type of `.pixelBuffer` or + * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format * types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha * channel. * * @param image The `MPImage` on which face landmark detection is to be performed. * - * @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an - * error in initializing the face landmaker. + * @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an error + * in initializing the face landmarker. */ - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error NS_SWIFT_NAME(detect(image:)); @@ -77,8 +77,8 @@ NS_SWIFT_NAME(FaceLandmarker) /** * Performs face landmark detection on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPImage`. Only use this method when the `MPPFaceLandmarker` is created with - * running mode `.video`. + * the provided `MPImage`. Only use this method when the `FaceLandmarker` is created with running + * mode `.video`. * * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types: diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h index 65136dc83..458508718 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h @@ -31,51 +31,52 @@ NS_SWIFT_NAME(GestureRecognizer) @interface MPPGestureRecognizer : NSObject /** - * Creates a new instance of `MPPGestureRecognizer` from an absolute path to a TensorFlow Lite model - * file stored locally on the device and the default `MPPGestureRecognizerOptions`. + * Creates a new instance of `GestureRecognizer` from an absolute path to a TensorFlow Lite model + * file stored locally on the device and the default `GestureRecognizerOptions`. * * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. * @param error An optional error parameter populated when there is an error in initializing the * gesture recognizer. * - * @return A new instance of `MPPGestureRecognizer` with the given model path. `nil` if there is an + * @return A new instance of `GestureRecognizer` with the given model path. `nil` if there is an * error in initializing the gesture recognizer. */ - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; /** - * Creates a new instance of `MPPGestureRecognizer` from the given `MPPGestureRecognizerOptions`. + * Creates a new instance of `GestureRecognizer` from the given `GestureRecognizerOptions`. * - * @param options The options of type `MPPGestureRecognizerOptions` to use for configuring the - * `MPPGestureRecognizer`. + * @param options The options of type `GestureRecognizerOptions` to use for configuring the + * `GestureRecognizer`. * @param error An optional error parameter populated when there is an error in initializing the * gesture recognizer. * - * @return A new instance of `MPPGestureRecognizer` with the given options. 
`nil` if there is an - * error in initializing the gesture recognizer. + * @return A new instance of `GestureRecognizer` with the given options. `nil` if there is an error + * in initializing the gesture recognizer. */ - (nullable instancetype)initWithOptions:(MPPGestureRecognizerOptions *)options error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Performs gesture recognition on the provided MPPImage using the whole image as region of + * Performs gesture recognition on the provided `MPImage` using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPPImage`. Only use this method when the `MPPGestureRecognizer` is created with - * `MPPRunningModeImage`. - * This method supports gesture recognition of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * `MPImage`. Only use this method when the `GestureRecognizer` is created with running mode + * `.image`. + * + * This method supports gesture recognition of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which gesture recognition is to be performed. + * @param image The `MPImage` on which gesture recognition is to be performed. * @param error An optional error parameter populated when there is an error in performing gesture * recognition on the input image. * - * @return An `MPPGestureRecognizerResult` object that contains the hand gesture recognition + * @return A `GestureRecognizerResult` object that contains the hand gesture recognition * results. */ - (nullable MPPGestureRecognizerResult *)recognizeImage:(MPPImage *)image @@ -83,30 +84,30 @@ NS_SWIFT_NAME(GestureRecognizer) NS_SWIFT_NAME(recognize(image:)); /** - * Performs gesture recognition on the provided video frame of type `MPPImage` using the whole - * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPGestureRecognizer` is created with - * `MPPRunningModeVideo`. + * Performs gesture recognition on the provided video frame of type `MPImage` using the whole image + * as region of interest. Rotation will be applied according to the `orientation` property of the + * provided `MPImage`. Only use this method when the `GestureRecognizer` is created with running + * mode `.video`. * * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * be monotonically increasing. * - * This method supports gesture recognition of RGBA images. 
If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which gesture recognition is to be performed. + * @param image The `MPImage` on which gesture recognition is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. * @param error An optional error parameter populated when there is an error in performing gesture * recognition on the input video frame. * - * @return An `MPPGestureRecognizerResult` object that contains the hand gesture recognition + * @return A `GestureRecognizerResult` object that contains the hand gesture recognition * results. */ - (nullable MPPGestureRecognizerResult *)recognizeVideoFrame:(MPPImage *)image @@ -115,33 +116,33 @@ NS_SWIFT_NAME(GestureRecognizer) NS_SWIFT_NAME(recognize(videoFrame:timestampInMilliseconds:)); /** - * Sends live stream image data of type `MPPImage` to perform gesture recognition using the whole + * Sends live stream image data of type `MPImage` to perform gesture recognition using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPGestureRecognizer` is created with - * `MPPRunningModeLiveStream`. + * the provided `MPImage`. Only use this method when the `GestureRecognizer` is created with running + * mode `.liveStream`. * * The object which needs to be continuously notified of the available results of gesture - * recognition must confirm to `MPPGestureRecognizerLiveStreamDelegate` protocol and implement the - * `gestureRecognizer:didFinishRecognitionWithResult:timestampInMilliseconds:error:` + * recognition must conform to `GestureRecognizerLiveStreamDelegate` protocol and implement the + * `gestureRecognizer(_:didFinishGestureRecognition:timestampInMilliseconds:error:)` * delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the gesture recognizer. The input timestamps must be monotonically increasing. * - * This method supports gesture recognition of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports gesture recognition of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color - * space is RGB with an Alpha channel. + * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. 
* * If this method is used for performing gesture recognition on live camera frames using * `AVFoundation`, ensure that you request `AVCaptureVideoDataOutput` to output frames in * `kCMPixelFormat_32RGBA` using its `videoSettings` property. * - * @param image A live stream image data of type `MPPImage` on which gesture recognition is to be + * @param image A live stream image data of type `MPImage` on which gesture recognition is to be * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the gesture recognizer. The input timestamps must be monotonically increasing. diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h index 323763cb5..ffc3be289 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h @@ -24,12 +24,12 @@ NS_ASSUME_NONNULL_BEGIN @class MPPGestureRecognizer; /** - * This protocol defines an interface for the delegates of `MPPGestureRecognizer` object to receive + * This protocol defines an interface for the delegates of `GestureRecognizer` object to receive * results of performing asynchronous gesture recognition on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * `.liveStream`). * - * The delegate of `MPPGestureRecognizer` must adopt `MPPGestureRecognizerLiveStreamDelegate` - * protocol. The methods in this protocol are optional. + * The delegate of `GestureRecognizer` must adopt `GestureRecognizerLiveStreamDelegate` protocol. + * The methods in this protocol are optional. */ NS_SWIFT_NAME(GestureRecognizerLiveStreamDelegate) @protocol MPPGestureRecognizerLiveStreamDelegate @@ -37,15 +37,15 @@ NS_SWIFT_NAME(GestureRecognizerLiveStreamDelegate) @optional /** - * This method notifies a delegate that the results of asynchronous gesture recognition of - * an image submitted to the `MPPGestureRecognizer` is available. + * This method notifies a delegate that the results of asynchronous gesture recognition of an image + * submitted to the `GestureRecognizer` are available. * - * This method is called on a private serial dispatch queue created by the `MPPGestureRecognizer` - * for performing the asynchronous delegates calls. + * This method is called on a private serial dispatch queue created by the `GestureRecognizer` for + * performing the asynchronous delegate calls. * - * @param gestureRecognizer The gesture recognizer which performed the gesture recognition. - * This is useful to test equality when there are multiple instances of `MPPGestureRecognizer`. - * @param result The `MPPGestureRecognizerResult` object that contains a list of detections, each + * @param gestureRecognizer The gesture recognizer which performed the gesture recognition. This is + * useful to test equality when there are multiple instances of `GestureRecognizer`. + * @param result The `GestureRecognizerResult` object that contains a list of detections, each * detection has a bounding box that is expressed in the unrotated input frame of reference * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the * underlying image data. 
@@ -62,26 +62,25 @@ NS_SWIFT_NAME(GestureRecognizerLiveStreamDelegate) NS_SWIFT_NAME(gestureRecognizer(_:didFinishGestureRecognition:timestampInMilliseconds:error:)); @end -/** Options for setting up a `MPPGestureRecognizer`. */ +/** Options for setting up a `GestureRecognizer`. */ NS_SWIFT_NAME(GestureRecognizerOptions) @interface MPPGestureRecognizerOptions : MPPTaskOptions /** - * Running mode of the gesture recognizer task. Defaults to `MPPRunningModeImage`. - * `MPPGestureRecognizer` can be created with one of the following running modes: - * 1. `MPPRunningModeImage`: The mode for performing gesture recognition on single image inputs. - * 2. `MPPRunningModeVideo`: The mode for performing gesture recognition on the decoded frames of a - * video. - * 3. `MPPRunningModeLiveStream`: The mode for performing gesture recognition on a live stream of - * input data, such as from the camera. + * Running mode of the gesture recognizer task. Defaults to `.image`. + * `GestureRecognizer` can be created with one of the following running modes: + * 1. `image`: The mode for performing gesture recognition on single image inputs. + * 2. `video`: The mode for performing gesture recognition on the decoded frames of a video. + * 3. `liveStream`: The mode for performing gesture recognition on a live stream of input data, + * such as from the camera. */ @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPGestureRecognizerLiveStreamDelegate` protocol. This object must - * implement `gestureRecognizer:didFinishRecognitionWithResult:timestampInMilliseconds:error:` to + * An object that conforms to `GestureRecognizerLiveStreamDelegate` protocol. This object must + * implement `gestureRecognizer(_:didFinishGestureRecognition:timestampInMilliseconds:error:)` to + * receive the results of performing asynchronous gesture recognition on images (i.e, when - * `runningMode` = `MPPRunningModeLiveStream`). + * `runningMode` = `.liveStream`). */ @property(nonatomic, weak, nullable) id gestureRecognizerLiveStreamDelegate; @@ -99,18 +98,18 @@ NS_SWIFT_NAME(GestureRecognizerOptions) @property(nonatomic) float minTrackingConfidence; /** - * Sets the optional `MPPClassifierOptions` controlling the canned gestures classifier, such as - * score threshold, allow list and deny list of gestures. The categories for canned gesture - * classifiers are: ["None", "Closed_Fist", "Open_Palm", "Pointing_Up", "Thumb_Down", "Thumb_Up", - * "Victory", "ILoveYou"]. + * Sets the optional `ClassifierOptions` controlling the canned gestures classifier, such as score + * threshold, allow list and deny list of gestures. The categories for canned gesture classifiers + * are: ["None", "Closed_Fist", "Open_Palm", "Pointing_Up", "Thumb_Down", "Thumb_Up", "Victory", + * "ILoveYou"]. * * TODO: Note this option is subject to change, after scoring merging calculator is implemented. 
*/ diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h index d56df2855..278ae89b6 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h @@ -20,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN -/** Represents the gesture recognition results generated by MPPGestureRecognizer. */ +/** Represents the gesture recognition results generated by `GestureRecognizer`. */ NS_SWIFT_NAME(GestureRecognizerResult) @interface MPPGestureRecognizerResult : MPPTaskResult @@ -41,7 +41,7 @@ NS_SWIFT_NAME(GestureRecognizerResult) @property(nonatomic, readonly) NSArray *> *gestures; /** - * Initializes a new `MPPGestureRecognizerResult` with the given landmarks, world landmarks, + * Initializes a new `GestureRecognizerResult` with the given landmarks, world landmarks, * handedness, gestures and timestamp (in milliseconds). * * @param landmarks The hand landmarks of detected hands. @@ -50,7 +50,7 @@ NS_SWIFT_NAME(GestureRecognizerResult) * @param handedness The recognized hand gestures of detected hands. * @param timestampInMilliseconds The timestamp for this result. * - * @return An instance of `MPPGestureRecognizerResult` initialized with the given landmarks, world + * @return An instance of `GestureRecognizerResult` initialized with the given landmarks, world * landmarks, handedness and gestures. * */ diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD index 677caf061..ba6f86e8f 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD @@ -71,9 +71,3 @@ objc_library( "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerResultHelpers", ], ) - -objc_library( - name = "MPPHandLandmark", - hdrs = ["sources/MPPHandLandmark.h"], - module_name = "MPPHandLandmark", -) diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h deleted file mode 100644 index fe08bde7e..000000000 --- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2023 The MediaPipe Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#import - -NS_ASSUME_NONNULL_BEGIN - -/** - * The enum containing the 21 hand landmarks. 
- */ -typedef NS_ENUM(NSUInteger, MPPHandLandmark) { - MPPHandLandmarkWrist, - - MPPHandLandmarkThumbCMC, - - MPPHandLandmarkThumbMCP, - - MPPHandLandmarkThumbIP, - - MPPHandLandmarkIndexFingerMCP, - - MPPHandLandmarkIndexFingerPIP, - - MPPHandLandmarkIndexFingerDIP, - - MPPHandLandmarkIndexFingerTIP, - - MPPHandLandmarkMiddleFingerMCP, - - MPPHandLandmarkMiddleFingerPIP, - - MPPHandLandmarkMiddleFingerDIP, - - MPPHandLandmarkMiddleFingerTIP, - - MPPHandLandmarkRingFingerMCP, - - MPPHandLandmarkRingFingerPIP, - - MPPHandLandmarkRingFingerDIP, - - MPPHandLandmarkRingFingerTIP, - - MPPHandLandmarkPinkyMCP, - - MPPHandLandmarkPinkyPIP, - - MPPHandLandmarkPinkyDIP, - - MPPHandLandmarkPinkyTIP, - -} NS_SWIFT_NAME(HandLandmark); - -NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h index 5a954af46..bed452cbc 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h @@ -21,6 +21,52 @@ NS_ASSUME_NONNULL_BEGIN +/** + * The enum containing the 21 hand landmarks. + */ +typedef NS_ENUM(NSUInteger, MPPHandLandmark) { + MPPHandLandmarkWrist, + + MPPHandLandmarkThumbCMC, + + MPPHandLandmarkThumbMCP, + + MPPHandLandmarkThumbIP, + + MPPHandLandmarkIndexFingerMCP, + + MPPHandLandmarkIndexFingerPIP, + + MPPHandLandmarkIndexFingerDIP, + + MPPHandLandmarkIndexFingerTIP, + + MPPHandLandmarkMiddleFingerMCP, + + MPPHandLandmarkMiddleFingerPIP, + + MPPHandLandmarkMiddleFingerDIP, + + MPPHandLandmarkMiddleFingerTIP, + + MPPHandLandmarkRingFingerMCP, + + MPPHandLandmarkRingFingerPIP, + + MPPHandLandmarkRingFingerDIP, + + MPPHandLandmarkRingFingerTIP, + + MPPHandLandmarkPinkyMCP, + + MPPHandLandmarkPinkyPIP, + + MPPHandLandmarkPinkyDIP, + + MPPHandLandmarkPinkyTIP, + +} NS_SWIFT_NAME(HandLandmark); + /** * @brief Performs hand landmarks detection on images. * @@ -48,82 +94,81 @@ NS_SWIFT_NAME(HandLandmarker) @property(class, nonatomic, readonly) NSArray *handConnections; /** - * Creates a new instance of `MPPHandLandmarker` from an absolute path to a model asset bundle - * stored locally on the device and the default `MPPHandLandmarkerOptions`. + * Creates a new instance of `HandLandmarker` from an absolute path to a model asset bundle stored + * locally on the device and the default `HandLandmarkerOptions`. * * @param modelPath An absolute path to a model asset bundle stored locally on the device. * @param error An optional error parameter populated when there is an error in initializing the * hand landmarker. * - * @return A new instance of `MPPHandLandmarker` with the given model path. `nil` if there is an - * error in initializing the hand landmarker. + * @return A new instance of `HandLandmarker` with the given model path. `nil` if there is an error + * in initializing the hand landmarker. */ - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; /** - * Creates a new instance of `MPPHandLandmarker` from the given `MPPHandLandmarkerOptions`. + * Creates a new instance of `HandLandmarker` from the given `HandLandmarkerOptions`. * - * @param options The options of type `MPPHandLandmarkerOptions` to use for configuring the - * `MPPHandLandmarker`. + * @param options The options of type `HandLandmarkerOptions` to use for configuring the + * `HandLandmarker`. 
* @param error An optional error parameter populated when there is an error in initializing the * hand landmarker. * - * @return A new instance of `MPPHandLandmarker` with the given options. `nil` if there is an - * error in initializing the hand landmarker. + * @return A new instance of `HandLandmarker` with the given options. `nil` if there is an error in + * initializing the hand landmarker. */ - (nullable instancetype)initWithOptions:(MPPHandLandmarkerOptions *)options error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Performs hand landmarks detection on the provided `MPPImage` using the whole image as region of + * Performs hand landmarks detection on the provided `MPImage` using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPPImage`. Only use this method when the `MPPHandLandmarker` is created with - * `MPPRunningModeImage`. + * `MPImage`. Only use this method when the `HandLandmarker` is created with running mode `.image`. * - * This method supports performing hand landmarks detection on RGBA images. If your `MPPImage` has a - * source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the - * underlying pixel buffer must have one of the following pixel format types: + * This method supports performing hand landmarks detection on RGBA images. If your `MPImage` has a + * source type of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of + * the following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which hand landmarks detection is to be performed. + * @param image The `MPImage` on which hand landmarks detection is to be performed. * @param error An optional error parameter populated when there is an error in performing hand * landmarks detection on the input image. * - * @return An `MPPHandLandmarkerResult` object that contains the hand hand landmarks detection + * @return A `HandLandmarkerResult` object that contains the hand landmarks detection * results. */ - (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error NS_SWIFT_NAME(detect(image:)); /** - * Performs hand landmarks detection on the provided video frame of type `MPPImage` using the whole + * Performs hand landmarks detection on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPHandLandmarker` is created with - * `MPPRunningModeVideo`. + * the provided `MPImage`. Only use this method when the `HandLandmarker` is created with running + * mode `.video`. * * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * be monotonically increasing. * - * This method supports performing hand landmarks detection on RGBA images. 
If your `MPImage` has a + * source type of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of + * the following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which hand landmarks detection is to be performed. + * @param image The `MPImage` on which hand landmarks detection is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. * @param error An optional error parameter populated when there is an error in performing hand * landmarks detection on the input video frame. * - * @return An `MPPHandLandmarkerResult` object that contains the hand hand landmarks detection + * @return A `HandLandmarkerResult` object that contains the hand landmarks detection * results. */ - (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image @@ -132,33 +177,32 @@ NS_SWIFT_NAME(HandLandmarker) NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); /** - * Sends live stream image data of type `MPPImage` to perform hand landmarks detection using the + * Sends live stream image data of type `MPImage` to perform hand landmarks detection using the * whole image as region of interest. Rotation will be applied according to the `orientation` - * property of the provided `MPPImage`. Only use this method when the `MPPHandLandmarker` is created - * with `MPPRunningModeLiveStream`. + * property of the provided `MPImage`. Only use this method when the `HandLandmarker` is created + * with running mode `.liveStream`. * * The object which needs to be continuously notified of the available results of hand landmarks - * detection must confirm to `MPPHandLandmarkerLiveStreamDelegate` protocol and implement the - * `handLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` - * delegate method. + * detection must conform to `HandLandmarkerLiveStreamDelegate` protocol and implement the + * `handLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)` delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the hand landmarker. The input timestamps must be monotonically increasing. * - * This method supports performing hand landmarks detection on RGBA images. If your `MPPImage` has a - * source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the - * underlying pixel buffer must have one of the following pixel format types: + * This method supports performing hand landmarks detection on RGBA images. If your `MPImage` has a + * source type of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of + * the following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color - * space is RGB with an Alpha channel. + * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. 
* * If this method is used for performing hand landmarks detection on live camera frames using * `AVFoundation`, ensure that you request `AVCaptureVideoDataOutput` to output frames in * `kCMPixelFormat_32RGBA` using its `videoSettings` property. * - * @param image A live stream image data of type `MPPImage` on which hand landmarks detection is to + * @param image A live stream image data of type `MPImage` on which hand landmarks detection is to * be performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the hand landmarker. The input timestamps must be monotonically increasing. diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerOptions.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerOptions.h index 4da78ecc1..a31341714 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerOptions.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerOptions.h @@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN @class MPPHandLandmarker; /** - * This protocol defines an interface for the delegates of `MPPHandLandmarker` object to receive - * results of performing asynchronous hand landmark detection on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * This protocol defines an interface for the delegates of `HandLandmarker` object to receive + * results of performing asynchronous hand landmark detection on images (i.e, when + * `runningMode` = `.liveStream`). * - * The delegate of `MPPHandLandmarker` must adopt `MPPHandLandmarkerLiveStreamDelegate` protocol. + * The delegate of `HandLandmarker` must adopt `HandLandmarkerLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ NS_SWIFT_NAME(HandLandmarkerLiveStreamDelegate) @@ -37,14 +37,14 @@ NS_SWIFT_NAME(HandLandmarkerLiveStreamDelegate) /** * This method notifies a delegate that the results of asynchronous hand landmark detection of an - * image submitted to the `MPPHandLandmarker` is available. + * image submitted to the `HandLandmarker` are available. * - * This method is called on a private serial dispatch queue created by the `MPPHandLandmarker` - * for performing the asynchronous delegates calls. + * This method is called on a private serial dispatch queue created by the `HandLandmarker` for + * performing the asynchronous delegate calls. * * @param handLandmarker The hand landmarker which performed the hand landmarking. - * This is useful to test equality when there are multiple instances of `MPPHandLandmarker`. - * @param result The `MPPHandLandmarkerResult` object that contains a list of detections, each + * This is useful to test equality when there are multiple instances of `HandLandmarker`. + * @param result The `HandLandmarkerResult` object that contains a list of detections, each * detection has a bounding box that is expressed in the unrotated input frame of reference * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the * underlying image data. @@ -60,32 +60,30 @@ NS_SWIFT_NAME(HandLandmarkerLiveStreamDelegate) NS_SWIFT_NAME(handLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)); @end -/** Options for setting up a `MPPHandLandmarker`. */ +/** Options for setting up a `HandLandmarker`. */ NS_SWIFT_NAME(HandLandmarkerOptions) @interface MPPHandLandmarkerOptions : MPPTaskOptions /** - * Running mode of the hand landmarker task. Defaults to `MPPRunningModeImage`. 
- * `MPPHandLandmarker` can be created with one of the following running modes: - * 1. `MPPRunningModeImage`: The mode for performing hand landmark detection on single image - * inputs. - * 2. `MPPRunningModeVideo`: The mode for performing hand landmark detection on the decoded frames - * of a video. - * 3. `MPPRunningModeLiveStream`: The mode for performing hand landmark detection on a live stream - * of input data, such as from the camera. + * Running mode of the hand landmarker task. Defaults to `.image`. + * `HandLandmarker` can be created with one of the following running modes: + * 1. `image`: The mode for performing hand landmark detection on single image inputs. + * 2. `video`: The mode for performing hand landmark detection on the decoded frames of a video. + * 3. `liveStream`: The mode for performing hand landmark detection on a live stream of input data, + * such as from the camera. */ @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPHandLandmarkerLiveStreamDelegate` protocol. This object must + * An object that conforms to `HandLandmarkerLiveStreamDelegate` protocol. This object must * implement `handLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` to * receive the results of performing asynchronous hand landmark detection on images (i.e, when - * `runningMode` = `MPPRunningModeLiveStream`). + * `runningMode` = `.liveStream`). */ @property(nonatomic, weak, nullable) id handLandmarkerLiveStreamDelegate; -/** The maximum number of hands that can be detected by the `MPPHandLandmarker`. */ +/** The maximum number of hands that can be detected by the `HandLandmarker`. */ @property(nonatomic) NSInteger numHands; /** The minimum confidence score for the hand detection to be considered successful. */ diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h index 238aa406a..2c00e9ee5 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h @@ -20,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN -/** Represents the hand landmarker results generated by MPPHandLandmarker. */ +/** Represents the hand landmarker results generated by `HandLandmarker`. */ NS_SWIFT_NAME(HandLandmarkerResult) @interface MPPHandLandmarkerResult : MPPTaskResult @@ -34,15 +34,15 @@ NS_SWIFT_NAME(HandLandmarkerResult) @property(nonatomic, readonly) NSArray *> *handedness; /** - * Initializes a new `MPPHandLandmarkerResult` with the given landmarks, world landmarks, - * handedness and timestamp (in milliseconds). + * Initializes a new `HandLandmarkerResult` with the given landmarks, world landmarks, handedness + * and timestamp (in milliseconds). * * @param landmarks The hand landmarks of detected hands. * @param worldLandmarks The hand landmarks in world coordniates of detected hands. * @param handedness The handedness of detected hands. * @param timestampInMilliseconds The timestamp for this result. * - * @return An instance of `MPPGHandLandmarkerResult` initialized with the given landmarks, world + * @return An instance of `HandLandmarkerResult` initialized with the given landmarks, world * landmarks, handedness and timestamp (in milliseconds). 
* */ diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h index a22dc632d..5b9b24fb6 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h @@ -76,7 +76,7 @@ NS_SWIFT_NAME(ImageClassifier) error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Performs image classification on the provided MPPImage using the whole image as region of + * Performs image classification on the provided `MPImage` using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided * `MPImage`. Only use this method when the `ImageClassifier` is created with running mode, * `.image`. @@ -90,7 +90,7 @@ NS_SWIFT_NAME(ImageClassifier) * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha * channel. * - * @param image The `MPPImage` on which image classification is to be performed. + * @param image The `MPImage` on which image classification is to be performed. * * @return An `ImageClassifierResult` object that contains a list of image classifications. */ @@ -101,7 +101,7 @@ NS_SWIFT_NAME(ImageClassifier) /** * Performs image classification on the provided `MPImage` cropped to the specified region of * interest. Rotation will be applied on the cropped image according to the `orientation` property - * of the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with + * of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with * running mode, `.image`. * * This method supports classification of RGBA images. If your `MPImage` has a source type of @@ -127,7 +127,7 @@ NS_SWIFT_NAME(ImageClassifier) /** * Performs image classification on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with + * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with * running mode `.video`. * * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must @@ -142,7 +142,7 @@ NS_SWIFT_NAME(ImageClassifier) * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha * channel. * - * @param image The `MPPImage` on which image classification is to be performed. + * @param image The `MPImage` on which image classification is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. * @@ -188,8 +188,8 @@ NS_SWIFT_NAME(ImageClassifier) /** * Sends live stream image data of type `MPImage` to perform image classification using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with - * `MPPRunningModeLiveStream`. + * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with running + * mode `.liveStream`. 
* * The object which needs to be continuously notified of the available results of image * classification must confirm to `ImageClassifierLiveStreamDelegate` protocol and implement the diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h index 72f8859b5..bfd136883 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h @@ -24,8 +24,7 @@ NS_ASSUME_NONNULL_BEGIN /** * This protocol defines an interface for the delegates of `ImageClassifier` object to receive - * results of asynchronous classification of images (i.e, when `runningMode = - * .liveStream`). + * results of asynchronous classification of images (i.e, when `runningMode` = `.liveStream`). * * The delegate of `ImageClassifier` must adopt `ImageClassifierLiveStreamDelegate` protocol. * The methods in this protocol are optional. diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h index 851e8a355..82721f47b 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h @@ -118,8 +118,7 @@ NS_SWIFT_NAME(ObjectDetector) /** * Performs object detection on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPImage`. Only use this method when the `MPPObjectDetector` is created with - * `.video`. + * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with `.video`. * * This method supports detecting objects in of RGBA images. If your `MPImage` has a source type of * .pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following @@ -170,7 +169,7 @@ NS_SWIFT_NAME(ObjectDetector) * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * `videoSettings` property. * - * @param image A live stream image data of type `MPPImage` on which object detection is to be + * @param image A live stream image data of type `MPImage` on which object detection is to be * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the object detector. The input timestamps must be monotonically increasing. diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h index 0060d3749..d31805a36 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h @@ -79,8 +79,7 @@ NS_SWIFT_NAME(ObjectDetectorOptions) * An object that confirms to `ObjectDetectorLiveStreamDelegate` protocol. This object must * implement `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to * receive the results of performing asynchronous object detection on images (i.e, when - * `runningMode` = - * `.liveStream`). + * `runningMode` = `.liveStream`). */ @property(nonatomic, weak, nullable) id objectDetectorLiveStreamDelegate;
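
For reference, the renamed Swift surface documented above can be exercised as follows. This is a minimal sketch, not part of the patch: it assumes the `MediaPipeTasksVision` module, the `MPImage(uiImage:)` initializer and the `baseOptions.modelAssetPath` option from the surrounding MediaPipe iOS API, and `sourceImage` is a hypothetical UIImage with a placeholder model path.

import MediaPipeTasksVision
import UIKit

// Configure the task for single-image inputs, i.e. running mode `.image`.
let options = HandLandmarkerOptions()
options.baseOptions.modelAssetPath = "hand_landmarker.task"  // placeholder path
options.runningMode = .image
options.numHands = 2

do {
  // Swift name for -initWithOptions:error: shown in MPPHandLandmarker.h above.
  let handLandmarker = try HandLandmarker(options: options)

  // Rotation is taken from the image's `orientation` property, as the docs
  // note. `sourceImage` is a hypothetical UIImage supplied by the caller.
  let image = try MPImage(uiImage: sourceImage)

  // Swift name for -detectInImage:error: (NS_SWIFT_NAME(detect(image:))).
  let result = try handLandmarker.detect(image: image)
  print("Detected \(result.landmarks.count) hand(s)")
} catch {
  print("Hand landmark detection failed: \(error)")
}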
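
Similarly, a hedged sketch of the live-stream flow: the delegate method name follows the NS_SWIFT_NAME shown in this patch, while `recognizeAsync(image:timestampInMilliseconds:)` is assumed from the MediaPipe iOS API rather than visible in the hunks above, and `mpImage` is a hypothetical MPImage.

import MediaPipeTasksVision

final class GestureHandler: NSObject, GestureRecognizerLiveStreamDelegate {
  func gestureRecognizer(_ gestureRecognizer: GestureRecognizer,
                         didFinishGestureRecognition result: GestureRecognizerResult?,
                         timestampInMilliseconds: Int,
                         error: Error?) {
    // Invoked on a private serial queue created by the recognizer.
    guard error == nil, let result = result else { return }
    print("\(result.gestures.count) gesture(s) at \(timestampInMilliseconds) ms")
  }
}

let options = GestureRecognizerOptions()
options.baseOptions.modelAssetPath = "gesture_recognizer.task"  // placeholder path
options.runningMode = .liveStream

// The delegate property is weak, so keep a strong reference elsewhere.
let handler = GestureHandler()
options.gestureRecognizerLiveStreamDelegate = handler

do {
  let recognizer = try GestureRecognizer(options: options)
  // Timestamps must be monotonically increasing, e.g. derived from each
  // camera frame's presentation time.
  try recognizer.recognizeAsync(image: mpImage, timestampInMilliseconds: 33)
} catch {
  print("Gesture recognition setup or dispatch failed: \(error)")
}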