Add FaceLandmarker Result API
PiperOrigin-RevId: 535735431
parent e483b31fcf
commit fddc3facf0
@@ -19,7 +19,14 @@ NS_ASSUME_NONNULL_BEGIN
 @interface MPPClassifications (Helpers)
 
-+ (MPPClassifications *)classificationsWithProto:
+/** Creates a new `MPPClassifications` object from the given proto, head index and head name. */
++ (MPPClassifications *)classificationsWithClassificationListProto:
+    (const ::mediapipe::ClassificationList &)proto
+                                    headIndex:(NSInteger)headIndex
+                                     headName:(NSString *)headName;
+
+/** Creates a new `MPPClassifications` object from the given classifications proto. */
++ (MPPClassifications *)classificationsWithClassificationsProto:
     (const mediapipe::tasks::components::containers::proto::Classifications &)classificationsProto;
 
 @end
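For illustration only (not part of this commit), a minimal Objective-C++ sketch of calling the renamed helper with a hand-built ClassificationList proto; the category values and head name below are made up:

// Illustrative sketch; assumes the helper category declared above is imported.
mediapipe::ClassificationList classificationList;
mediapipe::Classification *classification = classificationList.add_classification();
classification->set_index(0);
classification->set_score(0.9f);
classification->set_label("smile");

MPPClassifications *classifications =
    [MPPClassifications classificationsWithClassificationListProto:classificationList
                                                          headIndex:0
                                                           headName:@"blendshapes"];
// classifications.categories now holds one MPPCategory built from the proto entry.

The second helper, classificationsWithClassificationsProto:, covers the case where the head index and head name are already carried by the tasks Classifications proto.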
@@ -17,6 +17,7 @@
 #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h"
 
 namespace {
 using ClassificationListProto = ::mediapipe::ClassificationList;
+using ClassificationsProto = ::mediapipe::tasks::components::containers::proto::Classifications;
 using ClassificationResultProto =
     ::mediapipe::tasks::components::containers::proto::ClassificationResult;
@@ -24,7 +25,22 @@ using ClassificationResultProto =
 @implementation MPPClassifications (Helpers)
 
-+ (MPPClassifications *)classificationsWithProto:
++ (MPPClassifications *)classificationsWithClassificationListProto:
+    (const ClassificationListProto &)classificationListProto
+                                    headIndex:(NSInteger)headIndex
+                                     headName:(NSString *)headName {
+  NSMutableArray<MPPCategory *> *categories =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)classificationListProto.classification_size()];
+  for (const auto &classification : classificationListProto.classification()) {
+    [categories addObject:[MPPCategory categoryWithProto:classification]];
+  }
+
+  return [[MPPClassifications alloc] initWithHeadIndex:headIndex
+                                               headName:headName
+                                             categories:categories];
+}
+
++ (MPPClassifications *)classificationsWithClassificationsProto:
     (const ClassificationsProto &)classificationsProto {
   NSMutableArray<MPPCategory *> *categories =
       [NSMutableArray arrayWithCapacity:(NSUInteger)classificationsProto.classification_list()
@@ -33,14 +49,14 @@ using ClassificationResultProto =
     [categories addObject:[MPPCategory categoryWithProto:classification]];
   }
 
-  NSString *headName;
-  if (classificationsProto.has_head_name()) {
-    headName = [NSString stringWithCppString:classificationsProto.head_name()];
-  }
+  NSString *headName = classificationsProto.has_head_name()
+                           ? [NSString stringWithCppString:classificationsProto.head_name()]
+                           : [NSString string];
 
-  return [[MPPClassifications alloc] initWithHeadIndex:(NSInteger)classificationsProto.head_index()
-                                               headName:headName
-                                             categories:categories];
+  return [MPPClassifications
+      classificationsWithClassificationListProto:classificationsProto.classification_list()
+                                        headIndex:(NSInteger)classificationsProto.head_index()
+                                         headName:headName];
 }
 
 @end
@@ -52,7 +68,8 @@ using ClassificationResultProto =
   NSMutableArray *classifications = [NSMutableArray
       arrayWithCapacity:(NSUInteger)classificationResultProto.classifications_size()];
   for (const auto &classificationsProto : classificationResultProto.classifications()) {
-    [classifications addObject:[MPPClassifications classificationsWithProto:classificationsProto]];
+    [classifications addObject:[MPPClassifications
+                                   classificationsWithClassificationsProto:classificationsProto]];
   }
 
   NSInteger timestampInMilliseconds = 0;
@@ -62,7 +79,6 @@ using ClassificationResultProto =
 
   return [[MPPClassificationResult alloc] initWithClassifications:classifications
                                           timestampInMilliseconds:timestampInMilliseconds];
-  ;
 }
 
 @end
mediapipe/tasks/ios/test/vision/face_landmarker/utils/BUILD (new file, 55 lines)
@@ -0,0 +1,55 @@
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test")
|
||||
load(
|
||||
"//mediapipe/framework/tool:ios.bzl",
|
||||
"MPP_TASK_MINIMUM_OS_VERSION",
|
||||
)
|
||||
load(
|
||||
"@org_tensorflow//tensorflow/lite:special_rules.bzl",
|
||||
"tflite_ios_lab_runner",
|
||||
)
|
||||
|
||||
package(default_visibility = ["//mediapipe/tasks:internal"])
|
||||
|
||||
licenses(["notice"])
|
||||
|
||||
# Default tags for filtering iOS targets. Targets are restricted to Apple platforms.
|
||||
TFL_DEFAULT_TAGS = [
|
||||
"apple",
|
||||
]
|
||||
|
||||
# Following sanitizer tests are not supported by iOS test targets.
|
||||
TFL_DISABLED_SANITIZER_TAGS = [
|
||||
"noasan",
|
||||
"nomsan",
|
||||
"notsan",
|
||||
]
|
||||
|
||||
objc_library(
|
||||
name = "MPPFaceLandmarkeResultHelpersTestLibary",
|
||||
testonly = 1,
|
||||
srcs = ["sources/MPPFaceLandmarkerResult+HelpersTests.mm"],
|
||||
copts = [
|
||||
"-ObjC++",
|
||||
"-std=c++17",
|
||||
"-x objective-c++",
|
||||
],
|
||||
deps = [
|
||||
"//mediapipe/framework:packet",
|
||||
"//mediapipe/framework/formats:classification_cc_proto",
|
||||
"//mediapipe/framework/formats:landmark_cc_proto",
|
||||
"//mediapipe/framework/formats:matrix_data_cc_proto",
|
||||
"//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto",
|
||||
"//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerResult",
|
||||
"//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerResultHelpers",
|
||||
],
|
||||
)
|
||||
|
||||
ios_unit_test(
|
||||
name = "MPPFaceLandmarkeResultHelpersTest",
|
||||
minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION,
|
||||
runner = tflite_ios_lab_runner("IOS_LATEST"),
|
||||
tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS,
|
||||
deps = [
|
||||
":MPPFaceLandmarkeResultHelpersTestLibary",
|
||||
],
|
||||
)
|
|
@@ -0,0 +1,112 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import <XCTest/XCTest.h>

#include "mediapipe/framework/formats/classification.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/formats/matrix_data.pb.h"
#include "mediapipe/framework/packet.h"
#include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h"
#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h"
#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h"

using ::mediapipe::MakePacket;
using ::mediapipe::Packet;
using ::mediapipe::Timestamp;
using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
using ClassificationListProto = ::mediapipe::ClassificationList;
using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry;

static constexpr int kMicrosecondsPerMillisecond = 1000;

@interface MPPLandmarkerResultHelpersTests : XCTestCase {
}
@end

@implementation MPPLandmarkerResultHelpersTests

- (void)testCreatesResultFromLandmarkerPackets {
  const std::vector<NormalizedLandmarkListProto> normalizedLandmarkProtos({{}});
  const std::vector<ClassificationListProto> classificationProtos({{}});
  const std::vector<FaceGeometryProto> faceGeometryProto({{}});

  const auto landmarksPacket =
      MakePacket<std::vector<NormalizedLandmarkListProto>>(normalizedLandmarkProtos)
          .At(Timestamp(42 * kMicrosecondsPerMillisecond));
  const auto classificationsPacket =
      MakePacket<std::vector<ClassificationListProto>>(classificationProtos)
          .At(Timestamp(42 * kMicrosecondsPerMillisecond));
  const auto faceGeometryPacket = MakePacket<std::vector<FaceGeometryProto>>(faceGeometryProto)
                                      .At(Timestamp(42 * kMicrosecondsPerMillisecond));

  MPPFaceLandmarkerResult *results =
      [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:landmarksPacket
                                                      blendshapesPacket:classificationsPacket
                                           transformationMatrixesPacket:faceGeometryPacket];

  XCTAssertEqual(results.faceLandmarks.count, 1);
  XCTAssertEqual(results.faceBlendshapes.count, 1);
  XCTAssertEqual(results.facialTransformationMatrixes.count, 1);
  XCTAssertEqual(results.timestampInMilliseconds, 42);
}

- (void)testCreatesCopyOfFacialTransformationMatrix {
  MPPFaceLandmarkerResult *results;

  {
    // Create a scope so that the FaceGeometryProto gets deallocated before we access the
    // MPPFaceLandmarkerResult.
    FaceGeometryProto faceGeometryProto{};
    auto *matrixData = faceGeometryProto.mutable_pose_transform_matrix();
    matrixData->set_cols(4);
    matrixData->set_rows(4);
    for (size_t i = 0; i < 4 * 4; ++i) {
      matrixData->add_packed_data(0.1f * i);
    }

    const std::vector<FaceGeometryProto> faceGeometryProtos({faceGeometryProto});
    const auto faceGeometryPacket = MakePacket<std::vector<FaceGeometryProto>>(faceGeometryProtos);
    results = [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:{}
                                                              blendshapesPacket:{}
                                                   transformationMatrixesPacket:faceGeometryPacket];
  }

  XCTAssertEqual(results.facialTransformationMatrixes.count, 1);
  XCTAssertEqual(results.facialTransformationMatrixes[0].rows, 4);
  XCTAssertEqual(results.facialTransformationMatrixes[0].columns, 4);
  for (size_t column = 0; column < 4; ++column) {
    for (size_t row = 0; row < 4; ++row) {
      XCTAssertEqualWithAccuracy(
          [results.facialTransformationMatrixes[0] valueAtRow:row column:column],
          0.4f * row + 0.1f * column, /* accuracy= */ 0.0001f, @"at [%zu,%zu]", column, row);
    }
  }
}

- (void)testCreatesResultFromEmptyPackets {
  const Packet emptyPacket = Packet{}.At(Timestamp(0));
  MPPFaceLandmarkerResult *results =
      [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:emptyPacket
                                                      blendshapesPacket:emptyPacket
                                           transformationMatrixesPacket:emptyPacket];

  NSArray *emptyArray = [NSArray array];
  XCTAssertEqualObjects(results.faceLandmarks, emptyArray);
  XCTAssertEqualObjects(results.faceBlendshapes, emptyArray);
  XCTAssertEqualObjects(results.facialTransformationMatrixes, emptyArray);
  XCTAssertEqual(results.timestampInMilliseconds, 0);
}

@end
@@ -16,6 +16,22 @@ package(default_visibility = ["//mediapipe/tasks:internal"])
 
 licenses(["notice"])
 
+objc_library(
+    name = "MPPFaceLandmarkerResult",
+    srcs = ["sources/MPPFaceLandmarkerResult.mm"],
+    hdrs = ["sources/MPPFaceLandmarkerResult.h"],
+    copts = [
+        "-ObjC++",
+        "-std=c++17",
+        "-x objective-c++",
+    ],
+    deps = [
+        "//mediapipe/tasks/ios/components/containers:MPPClassificationResult",
+        "//mediapipe/tasks/ios/components/containers:MPPLandmark",
+        "//mediapipe/tasks/ios/core:MPPTaskResult",
+    ],
+)
+
 objc_library(
     name = "MPPFaceLandmarkerOptions",
     srcs = ["sources/MPPFaceLandmarkerOptions.m"],
@@ -0,0 +1,99 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h"
#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h"
#import "mediapipe/tasks/ios/core/sources/MPPTaskResult.h"

NS_ASSUME_NONNULL_BEGIN

/** A matrix that can be used for transformations. */
NS_SWIFT_NAME(TransformMatrix)
@interface MPPTransformMatrix : NSObject

/** The number of rows. */
@property(nonatomic, readonly) NSUInteger rows;

/** The number of columns. */
@property(nonatomic, readonly) NSUInteger columns;

/** The values of the transform matrix. */
@property(nonatomic, readonly) float *data;

/**
 * Creates a new MPPTransformMatrix.
 *
 * @param data Pointer to the memory location where the data is stored. The data is copied.
 * @param rows The number of rows.
 * @param columns The number of columns.
 */
- (instancetype)initWithData:(const float *)data
                        rows:(NSInteger)rows
                     columns:(NSInteger)columns NS_DESIGNATED_INITIALIZER;

- (instancetype)init NS_UNAVAILABLE;

/**
 * Returns the value located at the specified location. An NSRangeException is raised if the
 * location is outside the range of the matrix.
 */
- (float)valueAtRow:(NSUInteger)row column:(NSUInteger)column;

+ (instancetype)new NS_UNAVAILABLE;

@end

/** Represents the detection results generated by `MPPFaceLandmarker`. */
NS_SWIFT_NAME(FaceLandmarkerResult)
@interface MPPFaceLandmarkerResult : MPPTaskResult

/** Detected face landmarks in normalized image coordinates. */
@property(nonatomic, readonly) NSArray<NSArray<MPPNormalizedLandmark *> *> *faceLandmarks;

/**
 * Face blendshapes results. Defaults to an empty array if not enabled.
 */
@property(nonatomic, readonly) NSArray<MPPClassifications *> *faceBlendshapes;

/**
 * Facial transformation 4x4 matrices. Defaults to an empty array if not enabled.
 */
@property(nonatomic, readonly) NSArray<MPPTransformMatrix *> *facialTransformationMatrixes;

/**
 * Initializes a new `MPPFaceLandmarkerResult` with the given array of landmarks, blendshapes,
 * facialTransformationMatrixes and timestamp (in milliseconds).
 *
 * @param faceLandmarks An array of `MPPNormalizedLandmark` objects.
 * @param faceBlendshapes An array of `MPPClassifications` objects.
 * @param facialTransformationMatrixes An array of flattened matrices.
 * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
 *
 * @return An instance of `MPPFaceLandmarkerResult` initialized with the given face landmarks,
 * blendshapes, transformation matrixes and timestamp (in milliseconds).
 */
- (instancetype)initWithFaceLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)faceLandmarks
                      faceBlendshapes:(NSArray<MPPClassifications *> *)faceBlendshapes
         facialTransformationMatrixes:(NSArray<MPPTransformMatrix *> *)facialTransformationMatrixes
              timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER;

- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_UNAVAILABLE;

- (instancetype)init NS_UNAVAILABLE;

+ (instancetype)new NS_UNAVAILABLE;

@end

NS_ASSUME_NONNULL_END
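A minimal usage sketch (illustrative only, not part of this commit) of the new result and matrix types; the identity matrix and the timestamp are made-up values:

// Illustrative sketch; assumes the header above is imported and ARC is enabled.
const float identity[16] = {1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1};
MPPTransformMatrix *matrix = [[MPPTransformMatrix alloc] initWithData:identity rows:4 columns:4];

MPPFaceLandmarkerResult *result =
    [[MPPFaceLandmarkerResult alloc] initWithFaceLandmarks:@[]
                                            faceBlendshapes:@[]
                               facialTransformationMatrixes:@[ matrix ]
                                    timestampInMilliseconds:42];

for (MPPTransformMatrix *m in result.facialTransformationMatrixes) {
  // valueAtRow:column: raises an NSRangeException for out-of-range indices.
  float topLeft = [m valueAtRow:0 column:0];  // 1.0f for the identity matrix above.
  (void)topLeft;
}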
@@ -0,0 +1,74 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import <Foundation/Foundation.h>

#include <cstring>
#include <vector>

#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h"

@interface MPPTransformMatrix () {
  std::vector<float> _data;
}
@end

@implementation MPPTransformMatrix

- (instancetype)initWithData:(const float *)data rows:(NSInteger)rows columns:(NSInteger)columns {
  self = [super init];
  if (self) {
    _rows = rows;
    _columns = columns;
    // The caller's buffer is copied so that the matrix owns its data.
    _data = std::vector<float>(rows * columns);
    memcpy(_data.data(), data, rows * columns * sizeof(float));
  }
  return self;
}

- (float *)data {
  return _data.data();
}

- (float)valueAtRow:(NSUInteger)row column:(NSUInteger)column {
  if (row >= self.rows) {
    @throw [NSException exceptionWithName:NSRangeException
                                   reason:@"Row is outside of matrix range."
                                 userInfo:nil];
  }
  if (column >= self.columns) {
    @throw [NSException exceptionWithName:NSRangeException
                                   reason:@"Column is outside of matrix range."
                                 userInfo:nil];
  }
  // Values are stored in row-major order.
  return _data[row * _columns + column];
}

@end

@implementation MPPFaceLandmarkerResult

- (instancetype)initWithFaceLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)faceLandmarks
                      faceBlendshapes:(NSArray<MPPClassifications *> *)faceBlendshapes
         facialTransformationMatrixes:(NSArray<MPPTransformMatrix *> *)facialTransformationMatrixes
              timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
  if (self) {
    _faceLandmarks = [faceLandmarks copy];
    _faceBlendshapes = [faceBlendshapes copy];
    _facialTransformationMatrixes = [facialTransformationMatrixes copy];
  }
  return self;
}

@end
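Because initWithData:rows:columns: copies the caller's buffer into an internal std::vector, later writes to that buffer do not affect the matrix. A small illustrative sketch (not from the commit), with made-up values:

// Illustrative sketch: the matrix keeps its own copy of the input buffer.
float values[4] = {0.f, 1.f, 2.f, 3.f};
MPPTransformMatrix *matrix = [[MPPTransformMatrix alloc] initWithData:values rows:2 columns:2];

values[3] = 99.f;                               // Mutate the source buffer afterwards...
float copied = [matrix valueAtRow:1 column:1];  // ...the matrix still returns the copied 3.0f.
(void)copied;

This copy-on-init behaviour is what the testCreatesCopyOfFacialTransformationMatrix test earlier in this change relies on: the FaceGeometry proto can go out of scope while the result stays valid.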
@@ -31,3 +31,21 @@ objc_library(
         "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerOptions",
     ],
 )
+
+objc_library(
+    name = "MPPFaceLandmarkerResultHelpers",
+    srcs = ["sources/MPPFaceLandmarkerResult+Helpers.mm"],
+    hdrs = ["sources/MPPFaceLandmarkerResult+Helpers.h"],
+    deps = [
+        "//mediapipe/framework:packet",
+        "//mediapipe/framework/formats:classification_cc_proto",
+        "//mediapipe/framework/formats:landmark_cc_proto",
+        "//mediapipe/framework/formats:matrix_data_cc_proto",
+        "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto",
+        "//mediapipe/tasks/ios/components/containers:MPPClassificationResult",
+        "//mediapipe/tasks/ios/components/containers:MPPLandmark",
+        "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers",
+        "//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers",
+        "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerResult",
+    ],
+)
@@ -0,0 +1,44 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __cplusplus
#error "This file requires Objective-C++."
#endif  // __cplusplus

#include "mediapipe/framework/packet.h"
#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h"

NS_ASSUME_NONNULL_BEGIN

@interface MPPFaceLandmarkerResult (Helpers)

/**
 * Creates an `MPPFaceLandmarkerResult` from the MediaPipe packets containing the results of the
 * FaceLandmarker.
 *
 * @param landmarksPacket A MediaPipe packet wrapping a `std::vector<NormalizedLandmarkListProto>`.
 * @param blendshapesPacket A MediaPipe packet wrapping a `std::vector<ClassificationListProto>`.
 * @param transformationMatrixesPacket A MediaPipe packet wrapping a
 * `std::vector<FaceGeometryProto>`.
 *
 * @return An `MPPFaceLandmarkerResult` object that contains the contents of the provided packets.
 */
+ (MPPFaceLandmarkerResult *)
    faceLandmarkerResultWithLandmarksPacket:(const ::mediapipe::Packet &)landmarksPacket
                          blendshapesPacket:(const ::mediapipe::Packet &)blendshapesPacket
               transformationMatrixesPacket:
                   (const ::mediapipe::Packet &)transformationMatrixesPacket;
@end

NS_ASSUME_NONNULL_END
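An illustrative Objective-C++ sketch (not part of this commit) of feeding the conversion helper hand-built packets, along the lines of the unit tests earlier in this change; the landmark coordinates are made up and the includes from the test file are assumed:

// Illustrative sketch; empty packets for blendshapes and matrixes fall back to empty arrays.
mediapipe::NormalizedLandmarkList landmarkList;
mediapipe::NormalizedLandmark *landmark = landmarkList.add_landmark();
landmark->set_x(0.5f);
landmark->set_y(0.5f);
landmark->set_z(0.0f);

const std::vector<mediapipe::NormalizedLandmarkList> landmarkLists = {landmarkList};
const auto landmarksPacket =
    mediapipe::MakePacket<std::vector<mediapipe::NormalizedLandmarkList>>(landmarkLists)
        .At(mediapipe::Timestamp(0));

MPPFaceLandmarkerResult *result =
    [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:landmarksPacket
                                                    blendshapesPacket:{}
                                         transformationMatrixesPacket:{}];
// result.faceLandmarks contains one array with one MPPNormalizedLandmark;
// result.faceBlendshapes and result.facialTransformationMatrixes are empty.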
@@ -0,0 +1,101 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h"
#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h"

#include "mediapipe/framework/formats/classification.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/formats/matrix_data.pb.h"
#include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h"
#import "mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h"
#import "mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h"
#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h"
#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"

static constexpr int kMicrosecondsPerMillisecond = 1000;

using ::mediapipe::Packet;
using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
using ClassificationListProto = ::mediapipe::ClassificationList;
using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry;

@implementation MPPFaceLandmarkerResult (Helpers)

+ (MPPFaceLandmarkerResult *)
    faceLandmarkerResultWithLandmarksPacket:(const Packet &)landmarksPacket
                          blendshapesPacket:(const Packet &)blendshapesPacket
               transformationMatrixesPacket:(const Packet &)transformationMatrixesPacket {
  NSMutableArray<NSArray<MPPNormalizedLandmark *> *> *faceLandmarks;
  NSMutableArray<MPPClassifications *> *faceBlendshapes;
  NSMutableArray<MPPTransformMatrix *> *facialTransformationMatrixes;

  if (landmarksPacket.ValidateAsType<std::vector<NormalizedLandmarkListProto>>().ok()) {
    const std::vector<NormalizedLandmarkListProto> &landmarkListProtos =
        landmarksPacket.Get<std::vector<NormalizedLandmarkListProto>>();
    faceLandmarks = [NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProtos.size()];
    for (const auto &landmarkListProto : landmarkListProtos) {
      NSMutableArray<MPPNormalizedLandmark *> *currentFaceLandmarks =
          [NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProto.landmark_size()];
      for (const auto &landmarkProto : landmarkListProto.landmark()) {
        [currentFaceLandmarks
            addObject:[MPPNormalizedLandmark normalizedLandmarkWithProto:landmarkProto]];
      }
      [faceLandmarks addObject:currentFaceLandmarks];
    }
  } else {
    faceLandmarks = [NSMutableArray arrayWithCapacity:0];
  }

  if (blendshapesPacket.ValidateAsType<std::vector<ClassificationListProto>>().ok()) {
    const std::vector<ClassificationListProto> &classificationListProtos =
        blendshapesPacket.Get<std::vector<ClassificationListProto>>();
    faceBlendshapes =
        [NSMutableArray arrayWithCapacity:(NSUInteger)classificationListProtos.size()];
    for (const auto &classificationListProto : classificationListProtos) {
      [faceBlendshapes
          addObject:[MPPClassifications
                        classificationsWithClassificationListProto:classificationListProto
                                                          headIndex:0
                                                           headName:@""]];
    }
  } else {
    faceBlendshapes = [NSMutableArray arrayWithCapacity:0];
  }

  if (transformationMatrixesPacket.ValidateAsType<std::vector<FaceGeometryProto>>().ok()) {
    const std::vector<FaceGeometryProto> &geometryProtos =
        transformationMatrixesPacket.Get<std::vector<FaceGeometryProto>>();
    facialTransformationMatrixes =
        [NSMutableArray arrayWithCapacity:(NSUInteger)geometryProtos.size()];
    for (const auto &geometryProto : geometryProtos) {
      MPPTransformMatrix *transformMatrix = [[MPPTransformMatrix alloc]
          initWithData:geometryProto.pose_transform_matrix().packed_data().data()
                  rows:geometryProto.pose_transform_matrix().rows()
               columns:geometryProto.pose_transform_matrix().cols()];
      [facialTransformationMatrixes addObject:transformMatrix];
    }
  } else {
    facialTransformationMatrixes = [NSMutableArray arrayWithCapacity:0];
  }

  return [[MPPFaceLandmarkerResult alloc]
             initWithFaceLandmarks:faceLandmarks
                   faceBlendshapes:faceBlendshapes
      facialTransformationMatrixes:facialTransformationMatrixes
           timestampInMilliseconds:(NSInteger)(landmarksPacket.Timestamp().Value() /
                                               kMicrosecondsPerMillisecond)];
}

@end