From fddc3facf0d2cbc4806ed9c0c2b4a6df698ebcad Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 26 May 2023 16:00:35 -0700 Subject: [PATCH] Add FaceLandmarker Result API PiperOrigin-RevId: 535735431 --- .../sources/MPPClassificationResult+Helpers.h | 9 +- .../MPPClassificationResult+Helpers.mm | 36 ++++-- .../test/vision/face_landmarker/utils/BUILD | 55 +++++++++ .../MPPFaceLandmarkerResult+HelpersTests.mm | 112 ++++++++++++++++++ .../tasks/ios/vision/face_landmarker/BUILD | 16 +++ .../sources/MPPFaceLandmarkerResult.h | 99 ++++++++++++++++ .../sources/MPPFaceLandmarkerResult.mm | 74 ++++++++++++ .../ios/vision/face_landmarker/utils/BUILD | 18 +++ .../sources/MPPFaceLandmarkerResult+Helpers.h | 44 +++++++ .../MPPFaceLandmarkerResult+Helpers.mm | 101 ++++++++++++++++ 10 files changed, 553 insertions(+), 11 deletions(-) create mode 100644 mediapipe/tasks/ios/test/vision/face_landmarker/utils/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm create mode 100644 mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h create mode 100644 mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.mm create mode 100644 mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h create mode 100644 mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.mm diff --git a/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h b/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h index fde436feb..5e2738e1d 100644 --- a/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h +++ b/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h @@ -19,7 +19,14 @@ NS_ASSUME_NONNULL_BEGIN @interface MPPClassifications (Helpers) -+ (MPPClassifications 
*)classificationsWithProto: +/** Creates a new `MPPClassifications` object from the given proto, head index and head name. */ ++ (MPPClassifications *)classificationsWithClassificationListProto: + (const ::mediapipe::ClassificationList &)proto + headIndex:(NSInteger)headIndex + headName:(NSString *)headName; + +/** Creates a new `MPPClassifications` object from the given classifications proto. */ ++ (MPPClassifications *)classificationsWithClassificationsProto: (const mediapipe::tasks::components::containers::proto::Classifications &)classificationsProto; @end diff --git a/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm b/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm index 47f1cf45c..e1453c9c5 100644 --- a/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm +++ b/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm @@ -17,6 +17,7 @@ #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h" namespace { +using ClassificationListProto = ::mediapipe::ClassificationList; using ClassificationsProto = ::mediapipe::tasks::components::containers::proto::Classifications; using ClassificationResultProto = ::mediapipe::tasks::components::containers::proto::ClassificationResult; @@ -24,7 +25,22 @@ using ClassificationResultProto = @implementation MPPClassifications (Helpers) -+ (MPPClassifications *)classificationsWithProto: ++ (MPPClassifications *)classificationsWithClassificationListProto: + (const ClassificationListProto &)classificationListProto + headIndex:(NSInteger)headIndex + headName:(NSString *)headName { + NSMutableArray *categories = + [NSMutableArray arrayWithCapacity:(NSUInteger)classificationListProto.classification_size()]; + for (const auto &classification : classificationListProto.classification()) { + [categories addObject:[MPPCategory 
categoryWithProto:classification]]; + } + + return [[MPPClassifications alloc] initWithHeadIndex:headIndex + headName:headName + categories:categories]; +} + ++ (MPPClassifications *)classificationsWithClassificationsProto: (const ClassificationsProto &)classificationsProto { NSMutableArray *categories = [NSMutableArray arrayWithCapacity:(NSUInteger)classificationsProto.classification_list() @@ -33,14 +49,14 @@ using ClassificationResultProto = [categories addObject:[MPPCategory categoryWithProto:classification]]; } - NSString *headName; - if (classificationsProto.has_head_name()) { - headName = [NSString stringWithCppString:classificationsProto.head_name()]; - } + NSString *headName = classificationsProto.has_head_name() + ? [NSString stringWithCppString:classificationsProto.head_name()] + : [NSString string]; - return [[MPPClassifications alloc] initWithHeadIndex:(NSInteger)classificationsProto.head_index() - headName:headName - categories:categories]; + return [MPPClassifications + classificationsWithClassificationListProto:classificationsProto.classification_list() + headIndex:(NSInteger)classificationsProto.head_index() + headName:headName]; } @end @@ -52,7 +68,8 @@ using ClassificationResultProto = NSMutableArray *classifications = [NSMutableArray arrayWithCapacity:(NSUInteger)classificationResultProto.classifications_size()]; for (const auto &classificationsProto : classificationResultProto.classifications()) { - [classifications addObject:[MPPClassifications classificationsWithProto:classificationsProto]]; + [classifications addObject:[MPPClassifications + classificationsWithClassificationsProto:classificationsProto]]; } NSInteger timestampInMilliseconds = 0; @@ -62,7 +79,6 @@ using ClassificationResultProto = return [[MPPClassificationResult alloc] initWithClassifications:classifications timestampInMilliseconds:timestampInMilliseconds]; - ; } @end diff --git a/mediapipe/tasks/ios/test/vision/face_landmarker/utils/BUILD 
b/mediapipe/tasks/ios/test/vision/face_landmarker/utils/BUILD new file mode 100644 index 000000000..74f2bd11a --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/face_landmarker/utils/BUILD @@ -0,0 +1,55 @@ +load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test") +load( + "//mediapipe/framework/tool:ios.bzl", + "MPP_TASK_MINIMUM_OS_VERSION", +) +load( + "@org_tensorflow//tensorflow/lite:special_rules.bzl", + "tflite_ios_lab_runner", +) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +# Default tags for filtering iOS targets. Targets are restricted to Apple platforms. +TFL_DEFAULT_TAGS = [ + "apple", +] + +# Following sanitizer tests are not supported by iOS test targets. +TFL_DISABLED_SANITIZER_TAGS = [ + "noasan", + "nomsan", + "notsan", +] + +objc_library( + name = "MPPFaceLandmarkerResultHelpersTestLibrary", + testonly = 1, + srcs = ["sources/MPPFaceLandmarkerResult+HelpersTests.mm"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + deps = [ + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto", + "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerResult", + "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerResultHelpers", + ], +) + +ios_unit_test( + name = "MPPFaceLandmarkerResultHelpersTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPFaceLandmarkerResultHelpersTestLibrary", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm b/mediapipe/tasks/ios/test/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm new file mode 
100644 index 000000000..3572aa47e --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm @@ -0,0 +1,112 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import + +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix_data.pb.h" +#include "mediapipe/framework/packet.h" +#include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h" +#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h" +#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h" + +using ::mediapipe::MakePacket; +using ::mediapipe::Packet; +using ::mediapipe::Timestamp; +using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList; +using ClassificationListProto = ::mediapipe::ClassificationList; +using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry; + +static constexpr int kMicrosecondsPerMillisecond = 1000; + +@interface MPPLandmarkerResultHelpersTests : XCTestCase { +} +@end + +@implementation MPPLandmarkerResultHelpersTests + +- (void)testCreatesResultFromLandmarkerPackets { + const std::vector normalizedLandmarkProtos({{}}); + const std::vector classificationProtos({{}}); + const std::vector faceGeometryProto({{}}); + + const auto 
landmarksPacket = + MakePacket>(normalizedLandmarkProtos) + .At(Timestamp(42 * kMicrosecondsPerMillisecond)); + const auto classificationsPacket = + MakePacket>(classificationProtos) + .At(Timestamp(42 * kMicrosecondsPerMillisecond)); + const auto faceGeometryPacket = MakePacket>(faceGeometryProto) + .At(Timestamp(42 * kMicrosecondsPerMillisecond)); + + MPPFaceLandmarkerResult *results = + [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:landmarksPacket + blendshapesPacket:classificationsPacket + transformationMatrixesPacket:faceGeometryPacket]; + + XCTAssertEqual(results.faceLandmarks.count, 1); + XCTAssertEqual(results.faceBlendshapes.count, 1); + XCTAssertEqual(results.facialTransformationMatrixes.count, 1); + XCTAssertEqual(results.timestampInMilliseconds, 42); +} + +- (void)testCreatesCopyOfFacialTransformationMatrix { + MPPFaceLandmarkerResult *results; + + { + // Create scope so that FaceGeometryProto gets deallocated before we access the + // MPPFaceLandmarkerResult. 
+ FaceGeometryProto faceGeometryProto{}; + auto *matrixData = faceGeometryProto.mutable_pose_transform_matrix(); + matrixData->set_cols(4); + matrixData->set_rows(4); + for (size_t i = 0; i < 4 * 4; ++i) { + matrixData->add_packed_data(0.1f * i); + } + + const std::vector faceGeometryProtos({faceGeometryProto}); + const auto faceGeometryPacket = MakePacket>(faceGeometryProtos); + results = [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:{} + blendshapesPacket:{} + transformationMatrixesPacket:faceGeometryPacket]; + } + + XCTAssertEqual(results.facialTransformationMatrixes.count, 1); + XCTAssertEqual(results.facialTransformationMatrixes[0].rows, 4); + XCTAssertEqual(results.facialTransformationMatrixes[0].columns, 4); + for (size_t column = 0; column < 4; ++column) { + for (size_t row = 0; row < 4; ++row) { + XCTAssertEqualWithAccuracy( + [results.facialTransformationMatrixes[0] valueAtRow:row column:column], + 0.4f * row + 0.1f * column, /* accuracy= */ 0.0001f, @"at [%zu,%zu]", column, row); + } + } +} + +- (void)testCreatesResultFromEmptyPackets { + const Packet emptyPacket = Packet{}.At(Timestamp(0)); + MPPFaceLandmarkerResult *results = + [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:emptyPacket + blendshapesPacket:emptyPacket + transformationMatrixesPacket:emptyPacket]; + + NSArray *emptyArray = [NSArray array]; + XCTAssertEqualObjects(results.faceLandmarks, emptyArray); + XCTAssertEqualObjects(results.faceBlendshapes, emptyArray); + XCTAssertEqualObjects(results.facialTransformationMatrixes, emptyArray); + XCTAssertEqual(results.timestampInMilliseconds, 0); +} + +@end diff --git a/mediapipe/tasks/ios/vision/face_landmarker/BUILD b/mediapipe/tasks/ios/vision/face_landmarker/BUILD index e2b0148fa..c4b172698 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/BUILD +++ b/mediapipe/tasks/ios/vision/face_landmarker/BUILD @@ -16,6 +16,22 @@ package(default_visibility = ["//mediapipe/tasks:internal"]) licenses(["notice"]) 
+objc_library( + name = "MPPFaceLandmarkerResult", + srcs = ["sources/MPPFaceLandmarkerResult.mm"], + hdrs = ["sources/MPPFaceLandmarkerResult.h"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + deps = [ + "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", + "//mediapipe/tasks/ios/components/containers:MPPLandmark", + "//mediapipe/tasks/ios/core:MPPTaskResult", + ], +) + objc_library( name = "MPPFaceLandmarkerOptions", srcs = ["sources/MPPFaceLandmarkerOptions.m"], diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h new file mode 100644 index 000000000..c517ec158 --- /dev/null +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h @@ -0,0 +1,99 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import +#import "mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h" +#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h" +#import "mediapipe/tasks/ios/core/sources/MPPTaskResult.h" + +NS_ASSUME_NONNULL_BEGIN + +/** A matrix that can be used for transformations. */ +NS_SWIFT_NAME(TransformMatrix) +@interface MPPTransformMatrix : NSObject +/** The number of rows. */ +@property(nonatomic, readonly) NSUInteger rows; + +/** The number of columns. 
*/ +@property(nonatomic, readonly) NSUInteger columns; + +/** The values of the transform matrix. */ +@property(nonatomic, readonly) float *data; + +/** + * Creates a new MPPTransformMatrix. + * + * @param data Pointer to the memory location where the data is stored. The data is copied. + * @param rows The number of rows. + * @param columns The number of columns. + */ +- (instancetype)initWithData:(const float *)data + rows:(NSInteger)rows + columns:(NSInteger)columns NS_DESIGNATED_INITIALIZER; + +- (instancetype)init NS_UNAVAILABLE; + +/** + * Returns the value located at the specified location. An NSRangeException is raised if the + * location is outside the range of the matrix. + */ +- (float)valueAtRow:(NSUInteger)row column:(NSUInteger)column; + ++ (instancetype)new NS_UNAVAILABLE; + +@end + +/** Represents the detection results generated by `MPPFaceLandmarker`. */ +NS_SWIFT_NAME(FaceLandmarkerResult) +@interface MPPFaceLandmarkerResult : MPPTaskResult + +/** Detected face landmarks in normalized image coordinates. */ +@property(nonatomic, readonly) NSArray *> *faceLandmarks; + +/** + * Face blendshapes results. Defaults to an empty array if not enabled. + */ +@property(nonatomic, readonly) NSArray *faceBlendshapes; + +/** + * Facial transformation 4x4 matrices. Defaults to an empty array if not enabled. + */ +@property(nonatomic, readonly) NSArray *facialTransformationMatrixes; + +/** + * Initializes a new `MPPFaceLandmarkerResult` with the given array of landmarks, blendshapes, + * facialTransformationMatrixes and timestamp (in milliseconds). + * + * @param faceLandmarks An array of `MPPNormalizedLandmark` objects. + * @param faceBlendshapes An array of `MPPClassifications` objects. + * @param facialTransformationMatrixes An array of flattened matrices. + * @param timestampInMilliseconds The timestamp (in milliseconds) for this result. 
+ * + * @return An instance of `MPPFaceLandmarkerResult` initialized with the given array of detections + * and timestamp (in milliseconds). + */ +- (instancetype)initWithFaceLandmarks:(NSArray *> *)faceLandmarks + faceBlendshapes:(NSArray *)faceBlendshapes + facialTransformationMatrixes:(NSArray *)facialTransformationMatrixes + timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER; + +- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_UNAVAILABLE; + +- (instancetype)init NS_UNAVAILABLE; + ++ (instancetype)new NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.mm b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.mm new file mode 100644 index 000000000..46d27fb82 --- /dev/null +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.mm @@ -0,0 +1,74 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import + +#include + +#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h" + +@interface MPPTransformMatrix () { + std::vector _data; +} +@end + +@implementation MPPTransformMatrix + +- (instancetype)initWithData:(const float *)data rows:(NSInteger)rows columns:(NSInteger)columns { + self = [super init]; + if (self) { + _rows = rows; + _columns = columns; + _data = std::vector(rows * columns); + memcpy(_data.data(), data, rows * columns * sizeof(float)); + } + return self; +} + +- (float *)data { + return _data.data(); +} + +- (float)valueAtRow:(NSUInteger)row column:(NSUInteger)column { + if (row < 0 || row >= self.rows) { + @throw [NSException exceptionWithName:NSRangeException + reason:@"Row is outside of matrix range." + userInfo:nil]; + } + if (column < 0 || column >= self.columns) { + @throw [NSException exceptionWithName:NSRangeException + reason:@"Column is outside of matrix range." + userInfo:nil]; + } + return _data[row * _columns + column]; +} + +@end + +@implementation MPPFaceLandmarkerResult + +- (instancetype)initWithFaceLandmarks:(NSArray *> *)faceLandmarks + faceBlendshapes:(NSArray *)faceBlendshapes + facialTransformationMatrixes:(NSArray *> *)facialTransformationMatrixes + timestampInMilliseconds:(NSInteger)timestampInMilliseconds { + self = [super initWithTimestampInMilliseconds:timestampInMilliseconds]; + if (self) { + _faceLandmarks = [faceLandmarks copy]; + _faceBlendshapes = [faceBlendshapes copy]; + _facialTransformationMatrixes = [facialTransformationMatrixes copy]; + } + return self; +} + +@end diff --git a/mediapipe/tasks/ios/vision/face_landmarker/utils/BUILD b/mediapipe/tasks/ios/vision/face_landmarker/utils/BUILD index 594c91384..33f3f5c81 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/utils/BUILD +++ b/mediapipe/tasks/ios/vision/face_landmarker/utils/BUILD @@ -31,3 +31,21 @@ objc_library( "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerOptions", ], +) + +objc_library( + name = 
"MPPFaceLandmarkerResultHelpers", + srcs = ["sources/MPPFaceLandmarkerResult+Helpers.mm"], + hdrs = ["sources/MPPFaceLandmarkerResult+Helpers.h"], + deps = [ + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto", + "//mediapipe/tasks/ios/components/containers:MPPClassificationResult", + "//mediapipe/tasks/ios/components/containers:MPPLandmark", + "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers", + "//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers", + "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerResult", + ], +) diff --git a/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h new file mode 100644 index 000000000..422e1bf07 --- /dev/null +++ b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h @@ -0,0 +1,44 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef __cplusplus +#error "This file requires Objective-C++." 
+#endif // __cplusplus + +#include "mediapipe/framework/packet.h" +#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface MPPFaceLandmarkerResult (Helpers) + +/** + * Creates an `MPPFaceLandmarkerResult` from the MediaPipe packets containing the results of the + * FaceLandmarker. + * + * @param landmarksPacket a MediaPipe packet wrapping a `std::vector`. + * @param blendshapesPacket a MediaPipe packet wrapping a `std::vector`. + * @param transformationMatrixesPacket a MediaPipe packet wrapping a + * `std::vector`. + * + * @return An `MPPFaceLandmarkerResult` object that contains the contents of the provided packets. + */ ++ (MPPFaceLandmarkerResult *) + faceLandmarkerResultWithLandmarksPacket:(const ::mediapipe::Packet &)landmarksPacket + blendshapesPacket:(const ::mediapipe::Packet &)blendshapesPacket + transformationMatrixesPacket: + (const ::mediapipe::Packet &)transformationMatrixesPacket; +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.mm b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.mm new file mode 100644 index 000000000..3c914615d --- /dev/null +++ b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.mm @@ -0,0 +1,101 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h" +#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h" + +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix_data.pb.h" +#include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h" +#import "mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h" +#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h" +#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h" +#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h" + +static constexpr int kMicrosecondsPerMillisecond = 1000; + +using ::mediapipe::Packet; +using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList; +using ClassificationListProto = ::mediapipe::ClassificationList; +using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry; + +@implementation MPPFaceLandmarkerResult (Helpers) + ++ (MPPFaceLandmarkerResult *) + faceLandmarkerResultWithLandmarksPacket:(const Packet &)landmarksPacket + blendshapesPacket:(const Packet &)blendshapesPacket + transformationMatrixesPacket:(const Packet &)transformationMatrixesPacket { + NSMutableArray *> *faceLandmarks; + NSMutableArray *faceBlendshapes; + NSMutableArray *facialTransformationMatrixes; + + if (landmarksPacket.ValidateAsType>().ok()) { + const std::vector &landmarkListProtos = + landmarksPacket.Get>(); + faceLandmarks = [NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProtos.size()]; + for (const auto &landmarkListProto : landmarkListProtos) { + NSMutableArray *currentFaceLandmarks = + [NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProto.landmark_size()]; + for (const auto &landmarkProto : landmarkListProto.landmark()) { + 
[currentFaceLandmarks + addObject:[MPPNormalizedLandmark normalizedLandmarkWithProto:landmarkProto]]; + } + [faceLandmarks addObject:currentFaceLandmarks]; + } + } else { + faceLandmarks = [NSMutableArray arrayWithCapacity:0]; + } + + if (blendshapesPacket.ValidateAsType>().ok()) { + const std::vector &classificationListProtos = + blendshapesPacket.Get>(); + faceBlendshapes = + [NSMutableArray arrayWithCapacity:(NSUInteger)classificationListProtos.size()]; + for (const auto &classificationListProto : classificationListProtos) { + [faceBlendshapes + addObject:[MPPClassifications + classificationsWithClassificationListProto:classificationListProto + headIndex:0 + headName:@""]]; + } + } else { + faceBlendshapes = [NSMutableArray arrayWithCapacity:0]; + } + + if (transformationMatrixesPacket.ValidateAsType>().ok()) { + const std::vector &geometryProtos = + transformationMatrixesPacket.Get>(); + facialTransformationMatrixes = + [NSMutableArray arrayWithCapacity:(NSUInteger)geometryProtos.size()]; + for (const auto &geometryProto : geometryProtos) { + MPPTransformMatrix *transformMatrix = [[MPPTransformMatrix alloc] + initWithData:geometryProto.pose_transform_matrix().packed_data().data() + rows:geometryProto.pose_transform_matrix().rows() + columns:geometryProto.pose_transform_matrix().cols()]; + [facialTransformationMatrixes addObject:transformMatrix]; + } + } else { + facialTransformationMatrixes = [NSMutableArray arrayWithCapacity:0]; + } + + return [[MPPFaceLandmarkerResult alloc] + initWithFaceLandmarks:faceLandmarks + faceBlendshapes:faceBlendshapes + facialTransformationMatrixes:facialTransformationMatrixes + timestampInMilliseconds:(NSInteger)(landmarksPacket.Timestamp().Value() / + kMicrosecondsPerMillisecond)]; +} + +@end