Merge pull request #4900 from priankakariatyml:ios-pose-landmarker-impl

PiperOrigin-RevId: 578991151
This commit is contained in:
Copybara-Service 2023-11-02 15:23:04 -07:00
commit 1b8a0ee6af
7 changed files with 426 additions and 1 deletions

View File

@@ -37,3 +37,23 @@ objc_library(
"//mediapipe/tasks/ios/vision/core:MPPRunningMode", "//mediapipe/tasks/ios/vision/core:MPPRunningMode",
], ],
) )
# Header-only library holding the landmark-index connection table for the pose
# topology, consumed by the PoseLandmarker API below.
objc_library(
name = "MPPPoseLandmarksConnections",
hdrs = ["sources/MPPPoseLandmarksConnections.h"],
module_name = "MPPPoseLandmarksConnections",
deps = ["//mediapipe/tasks/ios/components/containers:MPPConnection"],
)
# Public Objective-C API surface of the pose landmarker task (header only here;
# the implementation target is added separately).
objc_library(
name = "MPPPoseLandmarker",
hdrs = ["sources/MPPPoseLandmarker.h"],
module_name = "MPPPoseLandmarker",
deps = [
":MPPPoseLandmarkerOptions",
":MPPPoseLandmarkerResult",
":MPPPoseLandmarksConnections",
"//mediapipe/tasks/ios/components/containers:MPPConnection",
"//mediapipe/tasks/ios/vision/core:MPPImage",
],
)

View File

@@ -0,0 +1,160 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerOptions.h"
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
NS_ASSUME_NONNULL_BEGIN
/**
 * @brief Performs pose landmarks detection on images.
 *
 * This API expects a pre-trained pose landmarks model asset bundle.
 */
NS_SWIFT_NAME(PoseLandmarker)
@interface MPPPoseLandmarker : NSObject
/** The array of connections between all the landmarks in the detected pose. */
@property(class, nonatomic, readonly) NSArray<MPPConnection *> *poseLandmarks;
/**
 * Creates a new instance of `PoseLandmarker` from an absolute path to a model asset bundle stored
 * locally on the device and the default `PoseLandmarkerOptions`.
 *
 * @param modelPath An absolute path to a model asset bundle stored locally on the device.
 * @param error An optional error parameter populated when there is an error in initializing the
 * pose landmarker.
 *
 * @return A new instance of `PoseLandmarker` with the given model path. `nil` if there is an error
 * in initializing the pose landmarker.
 */
- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
/**
 * Creates a new instance of `PoseLandmarker` from the given `PoseLandmarkerOptions`.
 *
 * @param options The options of type `PoseLandmarkerOptions` to use for configuring the
 * `PoseLandmarker`.
 * @param error An optional error parameter populated when there is an error in initializing the
 * pose landmarker.
 *
 * @return A new instance of `PoseLandmarker` with the given options. `nil` if there is an error in
 * initializing the pose landmarker.
 */
- (nullable instancetype)initWithOptions:(MPPPoseLandmarkerOptions *)options
error:(NSError **)error NS_DESIGNATED_INITIALIZER;
/**
 * Performs pose landmarks detection on the provided `MPImage` using the whole image as region of
 * interest. Rotation will be applied according to the `orientation` property of the provided
 * `MPImage`. Only use this method when the `PoseLandmarker` is created with running mode `.image`.
 *
 * This method supports performing pose landmarks detection on RGBA images. If your `MPImage` has a
 * source type of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must use
 * `kCVPixelFormatType_32BGRA` as its pixel format.
 *
 * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
 * channel.
 *
 * @param image The `MPImage` on which pose landmarks detection is to be performed.
 * @param error An optional error parameter populated when there is an error in performing pose
 * landmark detection on the input image.
 *
 * @return A `PoseLandmarkerResult` object that contains the pose landmarks detection
 * results.
 */
- (nullable MPPPoseLandmarkerResult *)detectImage:(MPPImage *)image
error:(NSError **)error NS_SWIFT_NAME(detect(image:));
/**
 * Performs pose landmarks detection on the provided video frame of type `MPImage` using the whole
 * image as region of interest. Rotation will be applied according to the `orientation` property of
 * the provided `MPImage`. Only use this method when the `PoseLandmarker` is created with running
 * mode `.video`.
 *
 * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
 * be monotonically increasing.
 *
 * This method supports performing pose landmarks detection on RGBA images. If your `MPImage` has a
 * source type of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must use
 * `kCVPixelFormatType_32BGRA` as its pixel format.
 *
 * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha
 * channel.
 *
 * @param image The `MPImage` on which pose landmarks detection is to be performed.
 * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
 * timestamps must be monotonically increasing.
 * @param error An optional error parameter populated when there is an error in performing pose
 * landmark detection on the input video frame.
 *
 * @return A `PoseLandmarkerResult` object that contains the pose landmarks detection
 * results.
 */
- (nullable MPPPoseLandmarkerResult *)detectVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError **)error
NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
/**
 * Sends live stream image data of type `MPImage` to perform pose landmarks detection using the
 * whole image as region of interest. Rotation will be applied according to the `orientation`
 * property of the provided `MPImage`. Only use this method when the `PoseLandmarker` is created
 * with running mode `.liveStream`.
 *
 * The object which needs to be continuously notified of the available results of pose landmark
 * detection must conform to the `PoseLandmarkerLiveStreamDelegate` protocol and implement the
 * `poseLandmarker(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method.
 *
 * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
 * to the pose landmarker. The input timestamps must be monotonically increasing.
 *
 * This method supports performing pose landmarks detection on RGBA images. If your `MPImage` has a
 * source type of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must use
 * `kCVPixelFormatType_32BGRA` as its pixel format.
 *
 * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an
 * Alpha channel.
 *
 * If this method is used for performing pose landmarks detection on live camera frames using
 * `AVFoundation`, ensure that you request `AVCaptureVideoDataOutput` to output frames in
 * `kCMPixelFormat_32BGRA` using its `videoSettings` property.
 *
 * @param image A live stream image data of type `MPImage` on which pose landmarks detection is to
 * be performed.
 * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
 * image is sent to the pose landmarker. The input timestamps must be monotonically increasing.
 * @param error An optional error parameter populated when there is an error in performing pose
 * landmark detection on the input live stream image data.
 *
 * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
 */
- (BOOL)detectAsyncImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError **)error
NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
@end
NS_ASSUME_NONNULL_END

View File

@@ -46,7 +46,7 @@ NS_SWIFT_NAME(PoseLandmarkerResult)
*/ */
- (instancetype)initWithLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)landmarks - (instancetype)initWithLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)landmarks
worldLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)worldLandmarks worldLandmarks:(NSArray<NSArray<MPPLandmark *> *> *)worldLandmarks
segmentationMasks:(NSArray<MPPMask *> *)segmentationMasks segmentationMasks:(nullable NSArray<MPPMask *> *)segmentationMasks
timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER; timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_UNAVAILABLE; - (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_UNAVAILABLE;

View File

@@ -0,0 +1,40 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
NS_ASSUME_NONNULL_BEGIN
// Landmark-index pairs describing which detected pose landmarks are connected
// (indices 0-32 of the landmark arrays in `MPPPoseLandmarkerResult`).
//
// NOTE(review): This global is *defined* (not merely declared `extern`) in a header with
// external linkage; including this header from more than one translation unit would produce
// duplicate-symbol link errors — consider `static`, or an `extern` declaration here with the
// definition moved to an implementation file. TODO: confirm against all includers.
// NOTE(review): An Objective-C array literal is not a compile-time constant, so this
// file-scope initializer compiles only in Objective-C++ translation units — verify every
// file that includes this header is built as ObjC++.
NSArray<MPPConnection *> *const MPPPoseLandmarksConnections = @[
[[MPPConnection alloc] initWithStart:0 end:1], [[MPPConnection alloc] initWithStart:1 end:2],
[[MPPConnection alloc] initWithStart:2 end:3], [[MPPConnection alloc] initWithStart:3 end:7],
[[MPPConnection alloc] initWithStart:0 end:4], [[MPPConnection alloc] initWithStart:4 end:5],
[[MPPConnection alloc] initWithStart:5 end:6], [[MPPConnection alloc] initWithStart:6 end:8],
[[MPPConnection alloc] initWithStart:9 end:10], [[MPPConnection alloc] initWithStart:11 end:12],
[[MPPConnection alloc] initWithStart:11 end:13], [[MPPConnection alloc] initWithStart:13 end:15],
[[MPPConnection alloc] initWithStart:15 end:17], [[MPPConnection alloc] initWithStart:15 end:19],
[[MPPConnection alloc] initWithStart:15 end:21], [[MPPConnection alloc] initWithStart:17 end:19],
[[MPPConnection alloc] initWithStart:12 end:14], [[MPPConnection alloc] initWithStart:14 end:16],
[[MPPConnection alloc] initWithStart:16 end:18], [[MPPConnection alloc] initWithStart:16 end:20],
[[MPPConnection alloc] initWithStart:16 end:22], [[MPPConnection alloc] initWithStart:18 end:20],
[[MPPConnection alloc] initWithStart:11 end:23], [[MPPConnection alloc] initWithStart:12 end:24],
[[MPPConnection alloc] initWithStart:23 end:24], [[MPPConnection alloc] initWithStart:23 end:25],
[[MPPConnection alloc] initWithStart:26 end:28], [[MPPConnection alloc] initWithStart:27 end:29],
[[MPPConnection alloc] initWithStart:28 end:30], [[MPPConnection alloc] initWithStart:29 end:31],
[[MPPConnection alloc] initWithStart:30 end:32], [[MPPConnection alloc] initWithStart:27 end:31],
[[MPPConnection alloc] initWithStart:28 end:32]
];
NS_ASSUME_NONNULL_END

View File

@@ -36,3 +36,21 @@ objc_library(
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarkerOptions", "//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarkerOptions",
], ],
) )
objc_library(
name = "MPPPoseLandmarkerResultHelpers",
srcs = ["sources/MPPPoseLandmarkerResult+Helpers.mm"],
hdrs = ["sources/MPPPoseLandmarkerResult+Helpers.h"],
copts = [
"-ObjC++",
"-std=c++17",
"-x objective-c++",
],
deps = [
"//mediapipe/framework:packet",
"//mediapipe/framework/formats:image",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers",
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarkerResult",
],
)

View File

@@ -0,0 +1,63 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/packet.h"
NS_ASSUME_NONNULL_BEGIN
/** Internal helpers for building `MPPPoseLandmarkerResult` objects from MediaPipe packets. */
@interface MPPPoseLandmarkerResult (Helpers)
/**
 * Creates an `MPPPoseLandmarkerResult` from landmarks, world landmarks and segmentation mask
 * packets.
 *
 * @param landmarksPacket A MediaPipe packet wrapping a `std::vector<NormalizedlandmarkListProto>`.
 * @param worldLandmarksPacket A MediaPipe packet wrapping a `std::vector<LandmarkListProto>`.
 * @param segmentationMasksPacket A MediaPipe packet wrapping a `std::vector<Image>`. Passed by
 * pointer and may be `nullptr` when the task was configured without segmentation masks.
 *
 * @return An `MPPPoseLandmarkerResult` object that contains the pose landmark detection
 * results.
 */
+ (MPPPoseLandmarkerResult *)
poseLandmarkerResultWithLandmarksPacket:(const mediapipe::Packet &)landmarksPacket
worldLandmarksPacket:(const mediapipe::Packet &)worldLandmarksPacket
segmentationMasksPacket:(const mediapipe::Packet *)segmentationMasksPacket;
/**
 * Creates an `MPPPoseLandmarkerResult` from landmarks, world landmarks and segmentation mask
 * images.
 *
 * @param landmarksProto A vector of protos of type `std::vector<NormalizedlandmarkListProto>`.
 * @param worldLandmarksProto A vector of protos of type `std::vector<LandmarkListProto>`.
 * @param segmentationMasks A vector of type `std::vector<Image>`; may be `nullptr`.
 * @param timestampInMilliSeconds The timestamp of the Packet that contained the result.
 *
 * NOTE(review): the selector spells `MilliSeconds` with a capital `S`, unlike
 * `timestampInMilliseconds` used elsewhere in this API — consider unifying in a follow-up.
 *
 * @return An `MPPPoseLandmarkerResult` object that contains the pose landmark detection
 * results.
 */
+ (MPPPoseLandmarkerResult *)
poseLandmarkerResultWithLandmarksProto:
(const std::vector<::mediapipe::NormalizedLandmarkList> &)landmarksProto
worldLandmarksProto:
(const std::vector<::mediapipe::LandmarkList> &)worldLandmarksProto
segmentationMasks:(const std::vector<mediapipe::Image> *)segmentationMasks
timestampInMilliSeconds:(NSInteger)timestampInMilliseconds;
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,124 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+Helpers.h"
#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
namespace {
using LandmarkListProto = ::mediapipe::LandmarkList;
using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
using ::mediapipe::Image;
using ::mediapipe::Packet;
// Packet timestamps are in microseconds; the public API reports milliseconds.
static const int kMicroSecondsPerMilliSecond = 1000;
} // namespace
@implementation MPPPoseLandmarkerResult (Helpers)

/** Returns a result that carries only the given timestamp, with empty landmark/mask arrays. */
+ (MPPPoseLandmarkerResult *)emptyPoseLandmarkerResultWithTimestampInMilliseconds:
    (NSInteger)timestampInMilliseconds {
  return [[MPPPoseLandmarkerResult alloc] initWithLandmarks:@[]
                                             worldLandmarks:@[]
                                          segmentationMasks:@[]
                                    timestampInMilliseconds:timestampInMilliseconds];
}

/**
 * Converts the raw proto vectors into an `MPPPoseLandmarkerResult`.
 * `segmentationMasks` may be `nullptr` when the task was configured without masks.
 */
+ (MPPPoseLandmarkerResult *)
    poseLandmarkerResultWithLandmarksProto:
        (const std::vector<NormalizedLandmarkListProto> &)landmarksProto
                       worldLandmarksProto:
                           (const std::vector<LandmarkListProto> &)worldLandmarksProto
                         segmentationMasks:(const std::vector<Image> *)segmentationMasks
                   timestampInMilliSeconds:(NSInteger)timestampInMilliseconds {
  // One inner array of normalized (image-space) landmarks per detected pose.
  NSMutableArray<NSMutableArray<MPPNormalizedLandmark *> *> *multiplePoseLandmarks =
      [NSMutableArray arrayWithCapacity:(NSUInteger)landmarksProto.size()];
  for (const auto &landmarkListProto : landmarksProto) {
    NSMutableArray<MPPNormalizedLandmark *> *landmarks =
        [NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProto.landmark().size()];
    for (const auto &normalizedLandmarkProto : landmarkListProto.landmark()) {
      MPPNormalizedLandmark *normalizedLandmark =
          [MPPNormalizedLandmark normalizedLandmarkWithProto:normalizedLandmarkProto];
      [landmarks addObject:normalizedLandmark];
    }
    [multiplePoseLandmarks addObject:landmarks];
  }

  // One inner array of world (metric-space) landmarks per detected pose.
  NSMutableArray<NSMutableArray<MPPLandmark *> *> *multiplePoseWorldLandmarks =
      [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarksProto.size()];
  for (const auto &worldLandmarkListProto : worldLandmarksProto) {
    NSMutableArray<MPPLandmark *> *worldLandmarks =
        [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarkListProto.landmark().size()];
    for (const auto &landmarkProto : worldLandmarkListProto.landmark()) {
      MPPLandmark *landmark = [MPPLandmark landmarkWithProto:landmarkProto];
      [worldLandmarks addObject:landmark];
    }
    [multiplePoseWorldLandmarks addObject:worldLandmarks];
  }

  // Bug fix: `segmentationMasks` is nullptr when the caller's segmentation-mask packet is
  // absent (see poseLandmarkerResultWithLandmarksPacket: below); the previous code
  // dereferenced it unconditionally and crashed. `nil` is valid here because the result's
  // `segmentationMasks:` initializer parameter is nullable.
  NSMutableArray<MPPMask *> *confidenceMasks = nil;
  if (segmentationMasks) {
    confidenceMasks = [NSMutableArray arrayWithCapacity:(NSUInteger)segmentationMasks->size()];
    for (const auto &segmentationMask : *segmentationMasks) {
      // Assumes the mask image stores float32 confidence values — TODO confirm upstream
      // graph output format. Always deep-copied so the result outlives the packet.
      [confidenceMasks
          addObject:[[MPPMask alloc] initWithFloat32Data:(float *)segmentationMask
                                                             .GetImageFrameSharedPtr()
                                                             .get()
                                                             ->PixelData()
                                                   width:segmentationMask.width()
                                                  height:segmentationMask.height()
                                              shouldCopy:YES]];
    }
  }

  MPPPoseLandmarkerResult *poseLandmarkerResult =
      [[MPPPoseLandmarkerResult alloc] initWithLandmarks:multiplePoseLandmarks
                                          worldLandmarks:multiplePoseWorldLandmarks
                                       segmentationMasks:confidenceMasks
                                 timestampInMilliseconds:timestampInMilliseconds];
  return poseLandmarkerResult;
}

/**
 * Unwraps the MediaPipe packets and builds a result. Returns an empty result (timestamp only)
 * when the landmarks packet is empty or when either landmark packet holds an unexpected type.
 * `segmentationMasksPacket` may be null for tasks configured without segmentation masks.
 */
+ (MPPPoseLandmarkerResult *)
    poseLandmarkerResultWithLandmarksPacket:(const Packet &)landmarksPacket
                       worldLandmarksPacket:(const Packet &)worldLandmarksPacket
                    segmentationMasksPacket:(const Packet *)segmentationMasksPacket {
  // Packet timestamps are microseconds; the public API reports milliseconds.
  NSInteger timestampInMilliseconds =
      (NSInteger)(landmarksPacket.Timestamp().Value() / kMicroSecondsPerMilliSecond);
  if (landmarksPacket.IsEmpty()) {
    return [MPPPoseLandmarkerResult
        emptyPoseLandmarkerResultWithTimestampInMilliseconds:timestampInMilliseconds];
  }
  if (!landmarksPacket.ValidateAsType<std::vector<NormalizedLandmarkListProto>>().ok() ||
      !worldLandmarksPacket.ValidateAsType<std::vector<LandmarkListProto>>().ok()) {
    return [MPPPoseLandmarkerResult
        emptyPoseLandmarkerResultWithTimestampInMilliseconds:timestampInMilliseconds];
  }
  const std::vector<Image> *segmentationMasks =
      segmentationMasksPacket ? &(segmentationMasksPacket->Get<std::vector<Image>>()) : nullptr;
  return [MPPPoseLandmarkerResult
      poseLandmarkerResultWithLandmarksProto:landmarksPacket
                                                 .Get<std::vector<NormalizedLandmarkListProto>>()
                         worldLandmarksProto:worldLandmarksPacket
                                                 .Get<std::vector<LandmarkListProto>>()
                           segmentationMasks:segmentationMasks
                     timestampInMilliSeconds:timestampInMilliseconds];
}
@end