Merge pull request #4940 from priankakariatyml:ios-pose-landmarker-implementation

PiperOrigin-RevId: 580578919
Copybara-Service 2023-11-08 10:39:24 -08:00
commit 65e74dde0f
21 changed files with 393 additions and 37 deletions

View File

@ -0,0 +1,35 @@
# Copyright 2023 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"])
objc_library(
name = "MPPPoseLandmarkerResultProtobufHelpers",
srcs = ["sources/MPPPoseLandmarkerResult+ProtobufHelpers.mm"],
hdrs = ["sources/MPPPoseLandmarkerResult+ProtobufHelpers.h"],
copts = [
"-ObjC++",
"-std=c++17",
"-x objective-c++",
],
deps = [
"//mediapipe/tasks/cc/components/containers/proto:landmarks_detection_result_cc_proto",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/test/vision/utils:parse_proto_utils",
"//mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarkerResult",
"//mediapipe/tasks/ios/vision/pose_landmarker/utils:MPPPoseLandmarkerResultHelpers",
],
)

View File

@ -0,0 +1,26 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarkerResult.h"
NS_ASSUME_NONNULL_BEGIN
@interface MPPPoseLandmarkerResult (ProtobufHelpers)
+ (MPPPoseLandmarkerResult *)poseLandmarkerResultFromProtobufFileWithName:(NSString *)fileName
shouldRemoveZPosition:(BOOL)removeZPosition;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,55 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/test/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+ProtobufHelpers.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+Helpers.h"
#include "mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.pb.h"
#include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"
namespace {
using LandmarksDetectionResultProto =
::mediapipe::tasks::containers::proto::LandmarksDetectionResult;
using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
} // anonymous namespace
@implementation MPPPoseLandmarkerResult (ProtobufHelpers)
+ (MPPPoseLandmarkerResult *)poseLandmarkerResultFromProtobufFileWithName:(NSString *)fileName
shouldRemoveZPosition:(BOOL)removeZPosition {
LandmarksDetectionResultProto landmarkDetectionResultProto;
if (!get_proto_from_pbtxt(fileName.cppString, landmarkDetectionResultProto).ok()) {
return nil;
}
if (removeZPosition) {
// Remove the z positions of landmarks, because they are not used in correctness testing. For
// video or live stream mode, the z positions vary a lot during tracking from frame to frame.
for (int i = 0; i < landmarkDetectionResultProto.landmarks().landmark().size(); i++) {
auto &landmark = *landmarkDetectionResultProto.mutable_landmarks()->mutable_landmark(i);
landmark.clear_z();
}
}
return [MPPPoseLandmarkerResult
poseLandmarkerResultWithLandmarksProto:{landmarkDetectionResultProto.landmarks()}
worldLandmarksProto:{landmarkDetectionResultProto.world_landmarks()}
segmentationMasks:nullptr
timestampInMilliseconds:0];
}
@end
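
A minimal sketch of how a test case might consume this category, assuming a hypothetical XCTest target; the expected-result file name "pose_landmarks.pbtxt" and the test class below are illustrative and not part of this commit.

#import <XCTest/XCTest.h>
#import "mediapipe/tasks/ios/test/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+ProtobufHelpers.h"

@interface MPPPoseLandmarkerResultProtobufHelpersExampleTests : XCTestCase
@end

@implementation MPPPoseLandmarkerResultProtobufHelpersExampleTests

- (void)testLoadsExpectedResultFromPbtxt {
  // Z values are stripped because they fluctuate from frame to frame in video/live-stream modes.
  MPPPoseLandmarkerResult *expectedResult = [MPPPoseLandmarkerResult
      poseLandmarkerResultFromProtobufFileWithName:@"pose_landmarks.pbtxt"
                             shouldRemoveZPosition:YES];
  XCTAssertNotNil(expectedResult);
}

@end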

View File

@ -17,14 +17,14 @@
#include "mediapipe/tasks/cc/components/containers/proto/classifications.pb.h"
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
namespace {
using ClassificationResultProto =
::mediapipe::tasks::components::containers::proto::ClassificationResult;
} // namespace
@implementation MPPLanguageDetectorResult (Helpers)
@ -36,7 +36,7 @@ using ClassificationResultProto =
return [MPPLanguageDetectorResult
languageDetectorResultWithClassificationResult:classificationResult
timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
kMicroSecondsPerMilliSecond)];
kMicrosecondsPerMillisecond)];
}
+ (MPPLanguageDetectorResult *)

View File

@ -17,7 +17,7 @@
#include "mediapipe/tasks/cc/components/containers/proto/classifications.pb.h"
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
namespace {
using ClassificationResultProto =
@ -25,7 +25,7 @@ using ClassificationResultProto =
using ::mediapipe::Packet;
} // namespace
@implementation MPPTextClassifierResult (Helpers)
@ -36,7 +36,7 @@ using ::mediapipe::Packet;
return [[MPPTextClassifierResult alloc]
initWithClassificationResult:classificationResult
timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
kMicroSecondsPerMilliSecond)];
kMicrosecondsPerMillisecond)];
}
@end

View File

@ -17,14 +17,14 @@
#include "mediapipe/tasks/cc/components/containers/proto/embeddings.pb.h"
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
namespace {
using EmbeddingResultProto = ::mediapipe::tasks::components::containers::proto::EmbeddingResult;
using ::mediapipe::Packet;
} // namespace
@implementation MPPTextEmbedderResult (Helpers)
@ -35,7 +35,7 @@ using ::mediapipe::Packet;
return [[MPPTextEmbedderResult alloc]
initWithEmbeddingResult:embeddingResult
timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
kMicroSecondsPerMilliSecond)];
kMicrosecondsPerMillisecond)];
}
@end

View File

@ -18,7 +18,7 @@
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/framework/timestamp.h"
static const NSUInteger kMicroSecondsPerMilliSecond = 1000;
static const NSUInteger kMicrosecondsPerMillisecond = 1000;
namespace {
using ::mediapipe::Image;
@ -51,7 +51,7 @@ using ::mediapipe::Timestamp;
}
return MakePacket<Image>(std::move(imageFrame))
.At(Timestamp(int64(timestampInMilliseconds * kMicroSecondsPerMilliSecond)));
.At(Timestamp(int64(timestampInMilliseconds * kMicrosecondsPerMillisecond)));
}
+ (Packet)createPacketWithNormalizedRect:(NormalizedRect &)normalizedRect {
@ -61,7 +61,7 @@ using ::mediapipe::Timestamp;
+ (Packet)createPacketWithNormalizedRect:(NormalizedRect &)normalizedRect
timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
return MakePacket<NormalizedRect>(std::move(normalizedRect))
.At(Timestamp(int64(timestampInMilliseconds * kMicroSecondsPerMilliSecond)));
.At(Timestamp(int64(timestampInMilliseconds * kMicrosecondsPerMillisecond)));
}
@end
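
For context, the renamed constant bridges the two time units involved here: MediaPipe packet timestamps are expressed in microseconds, while the public iOS APIs accept and report milliseconds. An illustrative round trip, assuming the value of 1000 defined above:

// One video frame at roughly 30 fps arrives at t = 33 ms.
NSInteger timestampInMilliseconds = 33;
// Packet timestamps are in microseconds: 33 * 1000 = 33000.
int64_t packetTimestampMicroseconds = timestampInMilliseconds * kMicrosecondsPerMillisecond;
// Result helpers divide by the same constant to report 33 ms back to the caller.
NSInteger reportedMilliseconds = (NSInteger)(packetTimestampMicroseconds / kMicrosecondsPerMillisecond);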

View File

@ -198,7 +198,7 @@ static NSString *const kTaskName = @"gestureRecognizer";
NSInteger timestampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicroSecondsPerMilliSecond;
kMicrosecondsPerMillisecond;
dispatch_async(_callbackQueue, ^{
[self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
didFinishRecognitionWithResult:result

View File

@ -20,7 +20,7 @@
NS_ASSUME_NONNULL_BEGIN
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
@interface MPPGestureRecognizerResult (Helpers)

View File

@ -117,7 +117,7 @@ static const NSInteger kDefaultGestureIndex = -1;
handLandmarksPacket:(const Packet &)handLandmarksPacket
worldLandmarksPacket:(const Packet &)worldLandmarksPacket {
NSInteger timestampInMilliseconds =
(NSInteger)(handGesturesPacket.Timestamp().Value() / kMicroSecondsPerMilliSecond);
(NSInteger)(handGesturesPacket.Timestamp().Value() / kMicrosecondsPerMillisecond);
if (handGesturesPacket.IsEmpty()) {
return [MPPGestureRecognizerResult

View File

@ -222,7 +222,7 @@ static NSString *const kTaskName = @"handLandmarker";
NSInteger timestampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicroSecondsPerMilliSecond;
kMicrosecondsPerMillisecond;
dispatch_async(_callbackQueue, ^{
[self.handLandmarkerLiveStreamDelegate handLandmarker:self
didFinishDetectionWithResult:result

View File

@ -20,7 +20,7 @@
NS_ASSUME_NONNULL_BEGIN
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
@interface MPPHandLandmarkerResult (Helpers)

View File

@ -95,7 +95,7 @@ using ::mediapipe::Packet;
worldLandmarksPacket:(const Packet &)worldLandmarksPacket
handednessPacket:(const Packet &)handednessPacket {
NSInteger timestampInMilliseconds =
(NSInteger)(landmarksPacket.Timestamp().Value() / kMicroSecondsPerMilliSecond);
(NSInteger)(landmarksPacket.Timestamp().Value() / kMicrosecondsPerMillisecond);
if (landmarksPacket.IsEmpty()) {
return [MPPHandLandmarkerResult

View File

@ -43,7 +43,7 @@ static NSString *const kTaskGraphName =
@"mediapipe.tasks.vision.image_classifier.ImageClassifierGraph";
static NSString *const kTaskName = @"imageClassifier";
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
#define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \
@ -221,7 +221,7 @@ static const int kMicroSecondsPerMilliSecond = 1000;
NSInteger timestampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicroSecondsPerMilliSecond;
kMicrosecondsPerMillisecond;
dispatch_async(_callbackQueue, ^{
[self.imageClassifierLiveStreamDelegate imageClassifier:self
didFinishClassificationWithResult:result

View File

@ -182,7 +182,7 @@ static NSString *const kTaskName = @"objectDetector";
NSInteger timestampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicroSecondsPerMilliSecond;
kMicrosecondsPerMillisecond;
dispatch_async(_callbackQueue, ^{
[self.objectDetectorLiveStreamDelegate objectDetector:self
didFinishDetectionWithResult:result

View File

@ -18,7 +18,7 @@
NS_ASSUME_NONNULL_BEGIN
static const int kMicroSecondsPerMilliSecond = 1000;
static const int kMicrosecondsPerMillisecond = 1000;
@interface MPPObjectDetectorResult (Helpers)

View File

@ -27,7 +27,7 @@ using ::mediapipe::Packet;
(const Packet &)packet {
NSInteger timestampInMilliseconds = (NSInteger)(packet.Timestamp().Value() /
kMicroSecondsPerMilliSecond);
kMicrosecondsPerMillisecond);
if (!packet.ValidateAsType<std::vector<DetectionProto>>().ok()) {
return [[MPPObjectDetectorResult alloc] initWithDetections:@[]
timestampInMilliseconds:timestampInMilliseconds];

View File

@ -47,13 +47,27 @@ objc_library(
objc_library(
name = "MPPPoseLandmarker",
srcs = ["sources/MPPPoseLandmarker.mm"],
hdrs = ["sources/MPPPoseLandmarker.h"],
copts = [
"-ObjC++",
"-std=c++17",
"-x objective-c++",
],
module_name = "MPPPoseLandmarker",
deps = [
":MPPPoseLandmarkerOptions",
":MPPPoseLandmarkerResult",
":MPPPoseLandmarksConnections",
"//mediapipe/tasks/cc/vision/pose_landmarker:pose_landmarker_graph",
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/components/containers:MPPConnection",
"//mediapipe/tasks/ios/core:MPPTaskInfo",
"//mediapipe/tasks/ios/vision/core:MPPImage",
"//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
"//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
"//mediapipe/tasks/ios/vision/pose_landmarker/utils:MPPPoseLandmarkerOptionsHelpers",
"//mediapipe/tasks/ios/vision/pose_landmarker/utils:MPPPoseLandmarkerResultHelpers",
],
)
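
As a rough sketch, the new library could be built on its own with Bazel using the package path implied by the dependency labels elsewhere in this change (any Apple platform or configuration flags required by your workspace are omitted):

bazel build //mediapipe/tasks/ios/vision/pose_landmarker:MPPPoseLandmarker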

View File

@ -0,0 +1,220 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarker.h"
#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
#import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarksConnections.h"
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerOptions+Helpers.h"
#import "mediapipe/tasks/ios/vision/pose_landmarker/utils/sources/MPPPoseLandmarkerResult+Helpers.h"
namespace {
using ::mediapipe::Timestamp;
using ::mediapipe::tasks::core::PacketMap;
using ::mediapipe::tasks::core::PacketsCallback;
} // namespace
static NSString *const kImageTag = @"IMAGE";
static NSString *const kImageInStreamName = @"image_in";
static NSString *const kNormRectTag = @"NORM_RECT";
static NSString *const kNormRectInStreamName = @"norm_rect_in";
static NSString *const kImageOutStreamName = @"image_out";
static NSString *const kPoseLandmarksTag = @"NORM_LANDMARKS";
static NSString *const kPoseLandmarksOutStreamName = @"pose_landmarks";
static NSString *const kWorldLandmarksTag = @"WORLD_LANDMARKS";
static NSString *const kWorldLandmarksOutStreamName = @"world_landmarks";
static NSString *const kSegmentationMasksTag = @"SEGMENTATION_MASK";
static NSString *const kSegmentationMasksOutStreamName = @"segmentation_masks";
static NSString *const kTaskGraphName =
@"mediapipe.tasks.vision.pose_landmarker.PoseLandmarkerGraph";
static NSString *const kTaskName = @"poseLandmarker";
#define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \
{kImageInStreamName.cppString, imagePacket}, { \
kNormRectInStreamName.cppString, normalizedRectPacket \
} \
}
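// For reference (illustrative expansion, not part of the macro): with the stream names defined
// above, InputPacketMap(imagePacket, normalizedRectPacket) produces a PacketMap of the form
//   {{"image_in", imagePacket}, {"norm_rect_in", normalizedRectPacket}}
// keyed by the graph's input stream names.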
#define PoseLandmarkerResultWithOutputPacketMap(outputPacketMap) \
([MPPPoseLandmarkerResult \
poseLandmarkerResultWithLandmarksPacket:outputPacketMap[kPoseLandmarksOutStreamName \
.cppString] \
worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName \
.cppString] \
segmentationMasksPacket:&(outputPacketMap[kSegmentationMasksOutStreamName \
.cppString])])
@interface MPPPoseLandmarker () {
/** iOS Vision Task Runner */
MPPVisionTaskRunner *_visionTaskRunner;
dispatch_queue_t _callbackQueue;
}
@property(nonatomic, weak) id<MPPPoseLandmarkerLiveStreamDelegate> poseLandmarkerLiveStreamDelegate;
@end
@implementation MPPPoseLandmarker
#pragma mark - Public
- (instancetype)initWithOptions:(MPPPoseLandmarkerOptions *)options error:(NSError **)error {
self = [super init];
if (self) {
MPPTaskInfo *taskInfo = [[MPPTaskInfo alloc]
initWithTaskGraphName:kTaskGraphName
inputStreams:@[
[NSString stringWithFormat:@"%@:%@", kImageTag, kImageInStreamName],
[NSString stringWithFormat:@"%@:%@", kNormRectTag, kNormRectInStreamName]
]
outputStreams:@[
[NSString
stringWithFormat:@"%@:%@", kPoseLandmarksTag, kPoseLandmarksOutStreamName],
[NSString
stringWithFormat:@"%@:%@", kWorldLandmarksTag, kWorldLandmarksOutStreamName],
[NSString stringWithFormat:@"%@:%@", kSegmentationMasksTag,
kSegmentationMasksOutStreamName],
[NSString stringWithFormat:@"%@:%@", kImageTag, kImageOutStreamName]
]
taskOptions:options
enableFlowLimiting:options.runningMode == MPPRunningModeLiveStream
error:error];
if (!taskInfo) {
return nil;
}
PacketsCallback packetsCallback = nullptr;
if (options.poseLandmarkerLiveStreamDelegate) {
_poseLandmarkerLiveStreamDelegate = options.poseLandmarkerLiveStreamDelegate;
// Create a private serial dispatch queue on which the delegate method will be called
// asynchronously. This ensures that if the client performs a long-running operation in the
// delegate method, the queue on which the C++ callbacks are invoked is not blocked and is
// free to continue with its own work.
_callbackQueue = dispatch_queue_create(
[MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], nullptr);
// Capture `self` weakly to avoid keeping it in memory and causing a retain cycle
// after `self` is set to `nil`.
MPPPoseLandmarker *__weak weakSelf = self;
packetsCallback = [=](absl::StatusOr<PacketMap> liveStreamResult) {
[weakSelf processLiveStreamResult:liveStreamResult];
};
}
_visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
runningMode:options.runningMode
roiAllowed:NO
packetsCallback:std::move(packetsCallback)
imageInputStreamName:kImageInStreamName
normRectInputStreamName:kNormRectInStreamName
error:error];
if (!_visionTaskRunner) {
return nil;
}
}
return self;
}
- (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error {
MPPPoseLandmarkerOptions *options = [[MPPPoseLandmarkerOptions alloc] init];
options.baseOptions.modelAssetPath = modelPath;
return [self initWithOptions:options error:error];
}
- (nullable MPPPoseLandmarkerResult *)detectImage:(MPPImage *)image error:(NSError **)error {
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
return [MPPPoseLandmarker poseLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
}
- (nullable MPPPoseLandmarkerResult *)detectVideoFrame:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError **)error {
std::optional<PacketMap> outputPacketMap =
[_visionTaskRunner processVideoFrame:image
timestampInMilliseconds:timestampInMilliseconds
error:error];
return [MPPPoseLandmarker poseLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
}
- (BOOL)detectAsyncImage:(MPPImage *)image
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
error:(NSError **)error {
return [_visionTaskRunner processLiveStreamImage:image
timestampInMilliseconds:timestampInMilliseconds
error:error];
}
+ (NSArray<MPPConnection *> *)poseLandmarks {
return MPPPoseLandmarksConnections;
}
#pragma mark - Private
- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
if (![self.poseLandmarkerLiveStreamDelegate
respondsToSelector:@selector(poseLandmarker:
didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
return;
}
NSError *callbackError = nil;
if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
dispatch_async(_callbackQueue, ^{
[self.poseLandmarkerLiveStreamDelegate poseLandmarker:self
didFinishDetectionWithResult:nil
timestampInMilliseconds:Timestamp::Unset().Value()
error:callbackError];
});
return;
}
PacketMap &outputPacketMap = liveStreamResult.value();
if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
return;
}
MPPPoseLandmarkerResult *result = PoseLandmarkerResultWithOutputPacketMap(outputPacketMap);
NSInteger timestampInMilliseconds =
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
kMicrosecondsPerMillisecond;
dispatch_async(_callbackQueue, ^{
[self.poseLandmarkerLiveStreamDelegate poseLandmarker:self
didFinishDetectionWithResult:result
timestampInMilliseconds:timestampInMilliseconds
error:callbackError];
});
}
+ (nullable MPPPoseLandmarkerResult *)poseLandmarkerResultWithOptionalOutputPacketMap:
(std::optional<PacketMap> &)outputPacketMap {
if (!outputPacketMap.has_value()) {
return nil;
}
return PoseLandmarkerResultWithOutputPacketMap(outputPacketMap.value());
}
@end
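
For orientation, a minimal usage sketch of the new task in image mode. The model file and test image names are hypothetical, error handling is elided, and the live-stream path would instead use detectAsyncImage:timestampInMilliseconds:error: together with the delegate shown above.

#import <UIKit/UIKit.h>
#import "mediapipe/tasks/ios/vision/pose_landmarker/sources/MPPPoseLandmarker.h"

NSError *error = nil;
MPPPoseLandmarkerOptions *options = [[MPPPoseLandmarkerOptions alloc] init];
options.baseOptions.modelAssetPath = @"pose_landmarker.task";  // hypothetical bundled model path

MPPPoseLandmarker *poseLandmarker = [[MPPPoseLandmarker alloc] initWithOptions:options error:&error];

UIImage *uiImage = [UIImage imageNamed:@"pose_test.jpg"];  // hypothetical test image in the app bundle
MPPImage *image = [[MPPImage alloc] initWithUIImage:uiImage error:&error];

MPPPoseLandmarkerResult *result = [poseLandmarker detectImage:image error:&error];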

View File

@ -20,6 +20,8 @@
NS_ASSUME_NONNULL_BEGIN
static const int kMicrosecondsPerMillisecond = 1000;
@interface MPPPoseLandmarkerResult (Helpers)
/**
@ -55,8 +57,9 @@ NS_ASSUME_NONNULL_BEGIN
(const std::vector<::mediapipe::NormalizedLandmarkList> &)landmarksProto
worldLandmarksProto:
(const std::vector<::mediapipe::LandmarkList> &)worldLandmarksProto
segmentationMasks:(const std::vector<mediapipe::Image> *)segmentationMasks
timestampInMilliSeconds:(NSInteger)timestampInMilliseconds;
segmentationMasks:
(nullable const std::vector<mediapipe::Image> *)segmentationMasks
timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
@end

View File

@ -21,8 +21,6 @@ using LandmarkListProto = ::mediapipe::LandmarkList;
using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
using ::mediapipe::Image;
using ::mediapipe::Packet;
static const int kMicroSecondsPerMilliSecond = 1000;
} // namespace
@implementation MPPPoseLandmarkerResult (Helpers)
@ -40,8 +38,8 @@ static const int kMicroSecondsPerMilliSecond = 1000;
(const std::vector<NormalizedLandmarkListProto> &)landmarksProto
worldLandmarksProto:
(const std::vector<LandmarkListProto> &)worldLandmarksProto
segmentationMasks:(const std::vector<Image> *)segmentationMasks
timestampInMilliSeconds:(NSInteger)timestampInMilliseconds {
segmentationMasks:(nullable const std::vector<Image> *)segmentationMasks
timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
NSMutableArray<NSMutableArray<MPPNormalizedLandmark *> *> *multiplePoseLandmarks =
[NSMutableArray arrayWithCapacity:(NSUInteger)landmarksProto.size()];
@ -69,6 +67,12 @@ static const int kMicroSecondsPerMilliSecond = 1000;
[multiplePoseWorldLandmarks addObject:worldLandmarks];
}
if (!segmentationMasks) {
return [[MPPPoseLandmarkerResult alloc] initWithLandmarks:multiplePoseLandmarks
worldLandmarks:multiplePoseWorldLandmarks
segmentationMasks:nil
timestampInMilliseconds:timestampInMilliseconds];
}
NSMutableArray<MPPMask *> *confidenceMasks =
[NSMutableArray arrayWithCapacity:(NSUInteger)segmentationMasks->size()];
@ -83,12 +87,11 @@ static const int kMicroSecondsPerMilliSecond = 1000;
shouldCopy:YES]];
}
MPPPoseLandmarkerResult *poseLandmarkerResult =
[[MPPPoseLandmarkerResult alloc] initWithLandmarks:multiplePoseLandmarks
worldLandmarks:multiplePoseWorldLandmarks
segmentationMasks:confidenceMasks
timestampInMilliseconds:timestampInMilliseconds];
return poseLandmarkerResult;
return [[MPPPoseLandmarkerResult alloc] initWithLandmarks:multiplePoseLandmarks
worldLandmarks:multiplePoseWorldLandmarks
segmentationMasks:confidenceMasks
timestampInMilliseconds:timestampInMilliseconds];
}
+ (MPPPoseLandmarkerResult *)
@ -96,7 +99,7 @@ static const int kMicroSecondsPerMilliSecond = 1000;
worldLandmarksPacket:(const Packet &)worldLandmarksPacket
segmentationMasksPacket:(const Packet *)segmentationMasksPacket {
NSInteger timestampInMilliseconds =
(NSInteger)(landmarksPacket.Timestamp().Value() / kMicroSecondsPerMilliSecond);
(NSInteger)(landmarksPacket.Timestamp().Value() / kMicrosecondsPerMillisecond);
if (landmarksPacket.IsEmpty()) {
return [MPPPoseLandmarkerResult
@ -118,7 +121,7 @@ static const int kMicroSecondsPerMilliSecond = 1000;
worldLandmarksProto:worldLandmarksPacket
.Get<std::vector<LandmarkListProto>>()
segmentationMasks:segmentationMasks
timestampInMilliSeconds:timestampInMilliseconds];
timestampInMilliseconds:timestampInMilliseconds];
}
@end