From 548b6e0b5e7ab61bd6f8db9b2d288d5891186030 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 31 Aug 2023 17:36:16 +0530 Subject: [PATCH 1/9] Added iOS Image Segmenter Tests --- .../ios/test/vision/image_segmenter/BUILD | 70 +++ .../image_segmenter/MPPImageSegmenterTests.mm | 575 ++++++++++++++++++ .../test/vision/image_segmenter/utils/BUILD | 55 ++ .../MPPFaceLandmarkerResult+HelpersTests.mm | 112 ++++ 4 files changed, 812 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/image_segmenter/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm create mode 100644 mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD new file mode 100644 index 000000000..03c515529 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD @@ -0,0 +1,70 @@ +load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test") +load( + "//mediapipe/framework/tool:ios.bzl", + "MPP_TASK_MINIMUM_OS_VERSION", +) +load( + "@org_tensorflow//tensorflow/lite:special_rules.bzl", + "tflite_ios_lab_runner", +) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +# Default tags for filtering iOS targets. Targets are restricted to Apple platforms. +TFL_DEFAULT_TAGS = [ + "apple", +] + +# Following sanitizer tests are not supported by iOS test targets. +TFL_DISABLED_SANITIZER_TAGS = [ + "noasan", + "nomsan", + "notsan", +] + +objc_library( + name = "MPPImageSegmenterObjcTestLibrary", + testonly = 1, + srcs = ["MPPImageSegmenterTests.mm"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + data = [ + "//mediapipe/tasks/testdata/vision:test_images", + "//mediapipe/tasks/testdata/vision:test_models", + "//mediapipe/tasks/testdata/vision:test_protos", + ], + deps = [ + # "//mediapipe/framework/formats:classification_cc_proto", + # "//mediapipe/framework/formats:landmark_cc_proto", + # "//mediapipe/framework/formats:matrix_data_cc_proto", + # "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto", + # "//mediapipe/tasks/ios/common:MPPCommon", + # "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers", + # "//mediapipe/tasks/ios/components/containers/utils:MPPDetectionHelpers", + # "//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers", + "//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils", + "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenter", + "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenterResult", + "//third_party/apple_frameworks:UIKit", + ] + select({ + "//third_party:opencv_ios_sim_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_x86_64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//conditions:default": ["@ios_opencv//:OpencvFramework"], + }), +) + +ios_unit_test( + name = "MPPImageSegmenterObjcTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPImageSegmenterObjcTestLibrary", + ], +) diff --git 
a/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm b/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
new file mode 100644
index 000000000..1a1dcfc8a
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
@@ -0,0 +1,575 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+#import <XCTest/XCTest.h>
+
+// #include "mediapipe/framework/formats/classification.pb.h"
+// #include "mediapipe/framework/formats/landmark.pb.h"
+// #include "mediapipe/framework/formats/matrix_data.pb.h"
+// #include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h"
+// #import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
+// #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h"
+// #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPDetection+Helpers.h"
+// #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
+#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
+// #include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h"
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h"
+
+// using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
+// using ClassificationListProto = ::mediapipe::ClassificationList;
+// using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry;
+// using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
+
+static NSString *const kPbFileExtension = @"pbtxt";
+
+typedef NSDictionary<NSString *, id> ResourceFileInfo;
+
+// static ResourceFileInfo *const kPortraitImage =
+//     @{@"name" : @"portrait", @"type" : @"jpg", @"orientation" : @(UIImageOrientationUp)};
+// static ResourceFileInfo *const kPortraitRotatedImage =
+//     @{@"name" : @"portrait_rotated", @"type" : @"jpg", @"orientation" : @(UIImageOrientationRight)};
+static ResourceFileInfo *const kCatImage = @{@"name" : @"cat", @"type" : @"jpg"};
+// static ResourceFileInfo *const kPortraitExpectedLandmarksName =
+//     @{@"name" : @"portrait_expected_face_landmarks", @"type" : kPbFileExtension};
+// static ResourceFileInfo *const kPortraitExpectedBlendshapesName =
+//     @{@"name" : @"portrait_expected_blendshapes", @"type" : kPbFileExtension};
+// static ResourceFileInfo *const kPortraitExpectedGeometryName =
+//     @{@"name" : @"portrait_expected_face_geometry", @"type" : kPbFileExtension};
+static NSString *const kImageSegmenterModelName = @"deeplabv3";
+// static NSString *const kFaceLandmarkerWithBlendshapesModelName =
+//     @"face_landmarker_v2_with_blendshapes";
+static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
+static NSString *const kLiveStreamTestsDictFaceLandmarkerKey = @"image_segmenter";
+static NSString *const kLiveStreamTestsDictExpectationKey =
@"expectation"; + +constexpr float kLandmarkErrorThreshold = 0.03f; +constexpr float kBlendshapesErrorThreshold = 0.1f; +constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f; + +#define AssertEqualErrors(error, expectedError) \ + XCTAssertNotNil(error); \ + XCTAssertEqualObjects(error.domain, expectedError.domain); \ + XCTAssertEqual(error.code, expectedError.code); \ + XCTAssertEqualObjects(error.localizedDescription, expectedError.localizedDescription) + +@interface MPPImageSegmenterTests : XCTestCase { + NSDictionary *_liveStreamSucceedsTestDict; + NSDictionary *_outOfOrderTimestampTestDict; +} +@end + +@implementation MPPImageSegmenterTests + +#pragma mark General Tests + +- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { + UIImageOrientation orientation = (UIImageOrientation)[fileInfo[@"orientation"] intValue]; + MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPImageSegmenterTests class] + fileName:fileInfo[@"name"] + ofType:fileInfo[@"type"] + orientation:orientation]; + XCTAssertNotNil(image); + return image; +} + ++ (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension { + NSString *filePath = + [[NSBundle bundleForClass:[MPPImageSegmenterTests class]] pathForResource:fileName + ofType:extension]; + return filePath; +} + +// - (void)testCreateImageSegmenterWithMissingModelPathFails { +// NSString *modelPath = [MPPFaceLandmarkerTests filePathWithName:@"" extension:@""]; + +// NSError *error = nil; +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithModelPath:modelPath +// error:&error]; +// XCTAssertNil(faceLandmarker); + +// NSError *expectedError = [NSError +// errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : +// @"INVALID_ARGUMENT: ExternalFile must specify at least one of 'file_content', " +// @"'file_name', 'file_pointer_meta' or 'file_descriptor_meta'." 
+// }]; +// AssertEqualErrors(error, expectedError); +// } + +#pragma mark Image Mode Tests + +- (void)testDetectWithImageModeAndPotraitSucceeds { + NSString *modelPath = [MPPImageSegmenterTests filePathWithName:kImageSegmenterModelName + extension:@"tflite"]; + MPPImageSegmenter *imageSegmenter = [[MPPImageSegmenter alloc] initWithModelPath:modelPath + error:nil]; + + MPPImage *image = [self imageWithFileInfo:kCatImage]; + MPPImageSegmenterResult *result = [imageSegmenter segmentImage:image error:nil]; + // NSArray *expectedLandmarks = + // [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName]; + // [self assertResultsOfDetectInImageWithFileInfo:kPortraitImage + // usingFaceLandmarker:faceLandmarker + // containsExpectedLandmarks:expectedLandmarks + // expectedBlendshapes:NULL + // expectedTransformationMatrix:NULL]; +} + +@end + +// - (void)testDetectWithImageModeAndPotraitAndFacialTransformationMatrixesSucceeds { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.outputFacialTransformationMatrixes = YES; +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; + +// NSArray *expectedLandmarks = +// [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName]; +// MPPTransformMatrix *expectedTransformationMatrix = [MPPFaceLandmarkerTests +// expectedTransformationMatrixFromFileInfo:kPortraitExpectedGeometryName]; +// [self assertResultsOfDetectInImageWithFileInfo:kPortraitImage +// usingFaceLandmarker:faceLandmarker +// containsExpectedLandmarks:expectedLandmarks +// expectedBlendshapes:NULL +// expectedTransformationMatrix:expectedTransformationMatrix]; +// } + +// - (void)testDetectWithImageModeAndNoFaceSucceeds { +// NSString *modelPath = [MPPFaceLandmarkerTests filePathWithName:kFaceLandmarkerModelName +// extension:@"task"]; +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithModelPath:modelPath +// error:nil]; +// XCTAssertNotNil(faceLandmarker); + +// NSError *error; +// MPPImage *mppImage = [self imageWithFileInfo:kCatImage]; +// MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInImage:mppImage +// error:&error]; +// XCTAssertNil(error); +// XCTAssertNotNil(faceLandmarkerResult); +// XCTAssertEqualObjects(faceLandmarkerResult.faceLandmarks, [NSArray array]); +// XCTAssertEqualObjects(faceLandmarkerResult.faceBlendshapes, [NSArray array]); +// XCTAssertEqualObjects(faceLandmarkerResult.facialTransformationMatrixes, [NSArray array]); +// } + +// #pragma mark Video Mode Tests + +// - (void)testDetectWithVideoModeAndPotraitSucceeds { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.runningMode = MPPRunningModeVideo; +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; + +// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; +// NSArray *expectedLandmarks = +// [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName]; +// for (int i = 0; i < 3; i++) { +// MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInVideoFrame:image +// timestampInMilliseconds:i +// error:nil]; +// [self assertFaceLandmarkerResult:faceLandmarkerResult +// containsExpectedLandmarks:expectedLandmarks +// expectedBlendshapes:NULL +// expectedTransformationMatrix:NULL]; +// } +// } + +// #pragma mark Live Stream Mode 
Tests + +// - (void)testDetectWithLiveStreamModeAndPotraitSucceeds { +// NSInteger iterationCount = 100; + +// // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. An +// // normal expectation will fail if expectation.fulfill() is not called +// // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will +// // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. +// // Since it is not possible to predict how many times the expectation is supposed to be +// // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and +// // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= +// // `iterationCount` times. +// XCTestExpectation *expectation = [[XCTestExpectation alloc] +// initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; +// expectation.expectedFulfillmentCount = iterationCount + 1; +// expectation.inverted = YES; + +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.runningMode = MPPRunningModeLiveStream; +// options.faceLandmarkerLiveStreamDelegate = self; + +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; +// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; + +// _liveStreamSucceedsTestDict = @{ +// kLiveStreamTestsDictFaceLandmarkerKey : faceLandmarker, +// kLiveStreamTestsDictExpectationKey : expectation +// }; + +// for (int i = 0; i < iterationCount; i++) { +// XCTAssertTrue([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:i error:nil]); +// } + +// NSTimeInterval timeout = 0.5f; +// [self waitForExpectations:@[ expectation ] timeout:timeout]; +// } + +// - (void)testDetectWithOutOfOrderTimestampsAndLiveStreamModeFails { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.runningMode = MPPRunningModeLiveStream; +// options.faceLandmarkerLiveStreamDelegate = self; + +// XCTestExpectation *expectation = [[XCTestExpectation alloc] +// initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; +// expectation.expectedFulfillmentCount = 1; + +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; +// _liveStreamSucceedsTestDict = @{ +// kLiveStreamTestsDictFaceLandmarkerKey : faceLandmarker, +// kLiveStreamTestsDictExpectationKey : expectation +// }; + +// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; +// XCTAssertTrue([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]); + +// NSError *error; +// XCTAssertFalse([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:0 error:&error]); + +// NSError *expectedError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : +// @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing." 
+// }]; +// AssertEqualErrors(error, expectedError); + +// NSTimeInterval timeout = 0.5f; +// [self waitForExpectations:@[ expectation ] timeout:timeout]; +// } + +// #pragma mark Running Mode Tests + +// - (void)testCreateFaceLandmarkerFailsWithDelegateInNonLiveStreamMode { +// MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; +// for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; + +// options.runningMode = runningModesToTest[i]; +// options.faceLandmarkerLiveStreamDelegate = self; + +// [self +// assertCreateFaceLandmarkerWithOptions:options +// failsWithExpectedError: +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : +// @"The vision task is in image or video mode. The " +// @"delegate must not be set in the task's options." +// }]]; +// } +// } + +// - (void)testCreateFaceLandmarkerFailsWithMissingDelegateInLiveStreamMode { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.runningMode = MPPRunningModeLiveStream; + +// [self assertCreateFaceLandmarkerWithOptions:options +// failsWithExpectedError: +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : +// @"The vision task is in live stream mode. An " +// @"object must be set as the delegate of the task " +// @"in its options to ensure asynchronous delivery " +// @"of results." +// }]]; +// } + +// - (void)testDetectFailsWithCallingWrongAPIInImageMode { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; + +// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; + +// NSError *liveStreamAPICallError; +// XCTAssertFalse([faceLandmarker detectAsyncInImage:image +// timestampInMilliseconds:0 +// error:&liveStreamAPICallError]); + +// NSError *expectedLiveStreamAPICallError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : @"The vision task is not initialized with live " +// @"stream mode. Current Running Mode: Image" +// }]; +// AssertEqualErrors(liveStreamAPICallError, expectedLiveStreamAPICallError); + +// NSError *videoAPICallError; +// XCTAssertFalse([faceLandmarker detectInVideoFrame:image +// timestampInMilliseconds:0 +// error:&videoAPICallError]); + +// NSError *expectedVideoAPICallError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : @"The vision task is not initialized with " +// @"video mode. 
Current Running Mode: Image" +// }]; +// AssertEqualErrors(videoAPICallError, expectedVideoAPICallError); +// } + +// - (void)testDetectFailsWithCallingWrongAPIInVideoMode { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.runningMode = MPPRunningModeVideo; + +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; + +// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; +// NSError *liveStreamAPICallError; +// XCTAssertFalse([faceLandmarker detectAsyncInImage:image +// timestampInMilliseconds:0 +// error:&liveStreamAPICallError]); + +// NSError *expectedLiveStreamAPICallError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : @"The vision task is not initialized with live " +// @"stream mode. Current Running Mode: Video" +// }]; +// AssertEqualErrors(liveStreamAPICallError, expectedLiveStreamAPICallError); + +// NSError *imageAPICallError; +// XCTAssertFalse([faceLandmarker detectInImage:image error:&imageAPICallError]); + +// NSError *expectedImageAPICallError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : @"The vision task is not initialized with " +// @"image mode. Current Running Mode: Video" +// }]; +// AssertEqualErrors(imageAPICallError, expectedImageAPICallError); +// } + +// - (void)testDetectFailsWithCallingWrongAPIInLiveStreamMode { +// MPPFaceLandmarkerOptions *options = +// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; +// options.runningMode = MPPRunningModeLiveStream; +// options.faceLandmarkerLiveStreamDelegate = self; +// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; + +// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; + +// NSError *imageAPICallError; +// XCTAssertFalse([faceLandmarker detectInImage:image error:&imageAPICallError]); + +// NSError *expectedImageAPICallError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : @"The vision task is not initialized with " +// @"image mode. Current Running Mode: Live Stream" +// }]; +// AssertEqualErrors(imageAPICallError, expectedImageAPICallError); + +// NSError *videoAPICallError; +// XCTAssertFalse([faceLandmarker detectInVideoFrame:image +// timestampInMilliseconds:0 +// error:&videoAPICallError]); + +// NSError *expectedVideoAPICallError = +// [NSError errorWithDomain:kExpectedErrorDomain +// code:MPPTasksErrorCodeInvalidArgumentError +// userInfo:@{ +// NSLocalizedDescriptionKey : @"The vision task is not initialized with " +// @"video mode. 
Current Running Mode: Live Stream" +// }]; +// AssertEqualErrors(videoAPICallError, expectedVideoAPICallError); +// } + +// #pragma mark MPPFaceLandmarkerLiveStreamDelegate Methods +// - (void)faceLandmarker:(MPPFaceLandmarker *)faceLandmarker +// didFinishDetectionWithResult:(MPPFaceLandmarkerResult *)faceLandmarkerResult +// timestampInMilliseconds:(NSInteger)timestampInMilliseconds +// error:(NSError *)error { +// NSArray *expectedLandmarks = +// [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName]; +// [self assertFaceLandmarkerResult:faceLandmarkerResult +// containsExpectedLandmarks:expectedLandmarks +// expectedBlendshapes:NULL +// expectedTransformationMatrix:NULL]; + +// if (faceLandmarker == _outOfOrderTimestampTestDict[kLiveStreamTestsDictFaceLandmarkerKey]) { +// [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; +// } else if (faceLandmarker == _liveStreamSucceedsTestDict[kLiveStreamTestsDictFaceLandmarkerKey]) { +// [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; +// } +// } + +// + (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension { +// NSString *filePath = +// [[NSBundle bundleForClass:[MPPFaceLandmarkerTests class]] pathForResource:fileName +// ofType:extension]; +// return filePath; +// } + +// + (NSArray *)expectedLandmarksFromFileInfo:(NSDictionary *)fileInfo { +// NSString *filePath = [self filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; +// NormalizedLandmarkListProto proto; +// if (!get_proto_from_pbtxt([filePath UTF8String], proto).ok()) { +// return nil; +// } +// NSMutableArray *landmarks = +// [NSMutableArray arrayWithCapacity:(NSUInteger)proto.landmark_size()]; +// for (const auto &landmarkProto : proto.landmark()) { +// [landmarks addObject:[MPPNormalizedLandmark normalizedLandmarkWithProto:landmarkProto]]; +// } +// return landmarks; +// } + +// + (MPPClassifications *)expectedBlendshapesFromFileInfo:(NSDictionary *)fileInfo { +// NSString *filePath = [self filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; +// ClassificationListProto proto; +// if (!get_proto_from_pbtxt([filePath UTF8String], proto).ok()) { +// return nil; +// } +// return [MPPClassifications classificationsWithClassificationListProto:proto +// headIndex:0 +// headName:[NSString string]]; +// } + +// + (MPPTransformMatrix *)expectedTransformationMatrixFromFileInfo:(NSDictionary *)fileInfo { +// NSString *filePath = [self filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; +// FaceGeometryProto proto; +// if (!get_proto_from_pbtxt([filePath UTF8String], proto).ok()) { +// return nil; +// } +// return [[MPPTransformMatrix alloc] initWithData:proto.pose_transform_matrix().packed_data().data() +// rows:proto.pose_transform_matrix().rows() +// columns:proto.pose_transform_matrix().cols()]; +// } + +// - (void)assertFaceLandmarkerResult:(MPPFaceLandmarkerResult *)faceLandmarkerResult +// containsExpectedLandmarks:(NSArray *)expectedLandmarks +// expectedBlendshapes:(nullable MPPClassifications *)expectedBlendshapes +// expectedTransformationMatrix:(nullable MPPTransformMatrix *)expectedTransformationMatrix { +// NSArray *landmarks = faceLandmarkerResult.faceLandmarks[0]; +// XCTAssertEqual(landmarks.count, expectedLandmarks.count); +// for (int i = 0; i < landmarks.count; ++i) { +// XCTAssertEqualWithAccuracy(landmarks[i].x, expectedLandmarks[i].x, kLandmarkErrorThreshold, +// @"index i = %d", i); +// 
XCTAssertEqualWithAccuracy(landmarks[i].y, expectedLandmarks[i].y, kLandmarkErrorThreshold, +// @"index i = %d", i); +// } + +// if (expectedBlendshapes == NULL) { +// XCTAssertEqualObjects(faceLandmarkerResult.faceBlendshapes, [NSArray array]); +// } else { +// MPPClassifications *blendshapes = faceLandmarkerResult.faceBlendshapes[0]; +// NSArray *actualCategories = blendshapes.categories; +// NSArray *expectedCategories = expectedBlendshapes.categories; +// XCTAssertEqual(actualCategories.count, expectedCategories.count); +// for (int i = 0; i < actualCategories.count; ++i) { +// XCTAssertEqual(actualCategories[i].index, expectedCategories[i].index, @"index i = %d", i); +// XCTAssertEqualWithAccuracy(actualCategories[i].score, expectedCategories[i].score, +// kBlendshapesErrorThreshold, @"index i = %d", i); +// XCTAssertEqualObjects(actualCategories[i].categoryName, expectedCategories[i].categoryName, +// @"index i = %d", i); +// XCTAssertEqualObjects(actualCategories[i].displayName, expectedCategories[i].displayName, +// @"index i = %d", i); +// } +// } + +// if (expectedTransformationMatrix == NULL) { +// XCTAssertEqualObjects(faceLandmarkerResult.facialTransformationMatrixes, [NSArray array]); +// } else { +// MPPTransformMatrix *actualTransformationMatrix = +// faceLandmarkerResult.facialTransformationMatrixes[0]; +// XCTAssertEqual(actualTransformationMatrix.rows, expectedTransformationMatrix.rows); +// XCTAssertEqual(actualTransformationMatrix.columns, expectedTransformationMatrix.columns); +// for (int i = 0; i < actualTransformationMatrix.rows * actualTransformationMatrix.columns; ++i) { +// XCTAssertEqualWithAccuracy(actualTransformationMatrix.data[i], +// expectedTransformationMatrix.data[i], +// kFacialTransformationMatrixErrorThreshold, @"index i = %d", i); +// } +// } +// } + +// #pragma mark Face Landmarker Initializers + +// - (MPPFaceLandmarkerOptions *)faceLandmarkerOptionsWithModelName:(NSString *)modelName { +// NSString *modelPath = [MPPFaceLandmarkerTests filePathWithName:modelName extension:@"task"]; +// MPPFaceLandmarkerOptions *faceLandmarkerOptions = [[MPPFaceLandmarkerOptions alloc] init]; +// faceLandmarkerOptions.baseOptions.modelAssetPath = modelPath; +// return faceLandmarkerOptions; +// } + +// - (void)assertCreateFaceLandmarkerWithOptions:(MPPFaceLandmarkerOptions *)faceLandmarkerOptions +// failsWithExpectedError:(NSError *)expectedError { +// NSError *error = nil; +// MPPFaceLandmarker *faceLandmarker = +// [[MPPFaceLandmarker alloc] initWithOptions:faceLandmarkerOptions error:&error]; +// XCTAssertNil(faceLandmarker); +// AssertEqualErrors(error, expectedError); +// } + +// #pragma mark Assert Detection Results + +// - (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { +// UIImageOrientation orientation = (UIImageOrientation)[fileInfo[@"orientation"] intValue]; +// MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPFaceLandmarkerTests class] +// fileName:fileInfo[@"name"] +// ofType:fileInfo[@"type"] +// orientation:orientation]; +// XCTAssertNotNil(image); +// return image; +// } + +// - (void)assertResultsOfDetectInImageWithFileInfo:(ResourceFileInfo *)fileInfo +// usingFaceLandmarker:(MPPFaceLandmarker *)faceLandmarker +// containsExpectedLandmarks: +// (NSArray *)expectedLandmarks +// expectedBlendshapes:(nullable MPPClassifications *)expectedBlendshapes +// expectedTransformationMatrix: +// (nullable MPPTransformMatrix *)expectedTransformationMatrix { +// MPPImage *mppImage = [self imageWithFileInfo:fileInfo]; + +// NSError *error; 
+// MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInImage:mppImage +// error:&error]; +// XCTAssertNil(error); +// XCTAssertNotNil(faceLandmarkerResult); + +// [self assertFaceLandmarkerResult:faceLandmarkerResult +// containsExpectedLandmarks:expectedLandmarks +// expectedBlendshapes:expectedBlendshapes +// expectedTransformationMatrix:expectedTransformationMatrix]; +// } + +// @end diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD b/mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD new file mode 100644 index 000000000..74f2bd11a --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD @@ -0,0 +1,55 @@ +load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test") +load( + "//mediapipe/framework/tool:ios.bzl", + "MPP_TASK_MINIMUM_OS_VERSION", +) +load( + "@org_tensorflow//tensorflow/lite:special_rules.bzl", + "tflite_ios_lab_runner", +) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +# Default tags for filtering iOS targets. Targets are restricted to Apple platforms. +TFL_DEFAULT_TAGS = [ + "apple", +] + +# Following sanitizer tests are not supported by iOS test targets. +TFL_DISABLED_SANITIZER_TAGS = [ + "noasan", + "nomsan", + "notsan", +] + +objc_library( + name = "MPPFaceLandmarkeResultHelpersTestLibary", + testonly = 1, + srcs = ["sources/MPPFaceLandmarkerResult+HelpersTests.mm"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + deps = [ + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto", + "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerResult", + "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerResultHelpers", + ], +) + +ios_unit_test( + name = "MPPFaceLandmarkeResultHelpersTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPFaceLandmarkeResultHelpersTestLibary", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm b/mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm new file mode 100644 index 000000000..3572aa47e --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm @@ -0,0 +1,112 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+#import <XCTest/XCTest.h>
+
+#include "mediapipe/framework/formats/classification.pb.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/formats/matrix_data.pb.h"
+#include "mediapipe/framework/packet.h"
+#include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h"
+#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h"
+#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h"
+
+using ::mediapipe::MakePacket;
+using ::mediapipe::Packet;
+using ::mediapipe::Timestamp;
+using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
+using ClassificationListProto = ::mediapipe::ClassificationList;
+using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry;
+
+static constexpr int kMicrosecondsPerMillisecond = 1000;
+
+@interface MPPLandmarkerResultHelpersTests : XCTestCase {
+}
+@end
+
+@implementation MPPLandmarkerResultHelpersTests
+
+- (void)testCreatesResultFromLandmarkerPackets {
+  const std::vector<NormalizedLandmarkListProto> normalizedLandmarkProtos({{}});
+  const std::vector<ClassificationListProto> classificationProtos({{}});
+  const std::vector<FaceGeometryProto> faceGeometryProto({{}});
+
+  const auto landmarksPacket =
+      MakePacket<std::vector<NormalizedLandmarkListProto>>(normalizedLandmarkProtos)
+          .At(Timestamp(42 * kMicrosecondsPerMillisecond));
+  const auto classificationsPacket =
+      MakePacket<std::vector<ClassificationListProto>>(classificationProtos)
+          .At(Timestamp(42 * kMicrosecondsPerMillisecond));
+  const auto faceGeometryPacket = MakePacket<std::vector<FaceGeometryProto>>(faceGeometryProto)
+                                      .At(Timestamp(42 * kMicrosecondsPerMillisecond));
+
+  MPPFaceLandmarkerResult *results =
+      [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:landmarksPacket
+                                                     blendshapesPacket:classificationsPacket
+                                          transformationMatrixesPacket:faceGeometryPacket];
+
+  XCTAssertEqual(results.faceLandmarks.count, 1);
+  XCTAssertEqual(results.faceBlendshapes.count, 1);
+  XCTAssertEqual(results.facialTransformationMatrixes.count, 1);
+  XCTAssertEqual(results.timestampInMilliseconds, 42);
+}
+
+- (void)testCreatesCopyOfFacialTransformationMatrix {
+  MPPFaceLandmarkerResult *results;
+
+  {
+    // Create scope so that FaceGeometryProto gets deallocated before we access the
+    // MPPFaceLandmarkerResult.
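+    // If the result held only a reference to the proto's matrix storage instead of a copy,
+    // the reads below would touch freed memory once this scope ends, and the assertions
+    // would compare against garbage values.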
+    FaceGeometryProto faceGeometryProto{};
+    auto *matrixData = faceGeometryProto.mutable_pose_transform_matrix();
+    matrixData->set_cols(4);
+    matrixData->set_rows(4);
+    for (size_t i = 0; i < 4 * 4; ++i) {
+      matrixData->add_packed_data(0.1f * i);
+    }
+
+    const std::vector<FaceGeometryProto> faceGeometryProtos({faceGeometryProto});
+    const auto faceGeometryPacket = MakePacket<std::vector<FaceGeometryProto>>(faceGeometryProtos);
+    results = [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:{}
+                                                             blendshapesPacket:{}
+                                                  transformationMatrixesPacket:faceGeometryPacket];
+  }
+
+  XCTAssertEqual(results.facialTransformationMatrixes.count, 1);
+  XCTAssertEqual(results.facialTransformationMatrixes[0].rows, 4);
+  XCTAssertEqual(results.facialTransformationMatrixes[0].columns, 4);
+  for (size_t column = 0; column < 4; ++column) {
+    for (size_t row = 0; row < 4; ++row) {
+      XCTAssertEqualWithAccuracy(
+          [results.facialTransformationMatrixes[0] valueAtRow:row column:column],
+          0.4f * row + 0.1f * column, /* accuracy= */ 0.0001f, @"at [%zu,%zu]", column, row);
+    }
+  }
+}
+
+- (void)testCreatesResultFromEmptyPackets {
+  const Packet emptyPacket = Packet{}.At(Timestamp(0));
+  MPPFaceLandmarkerResult *results =
+      [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:emptyPacket
+                                                     blendshapesPacket:emptyPacket
+                                          transformationMatrixesPacket:emptyPacket];
+
+  NSArray *emptyArray = [NSArray array];
+  XCTAssertEqualObjects(results.faceLandmarks, emptyArray);
+  XCTAssertEqualObjects(results.faceBlendshapes, emptyArray);
+  XCTAssertEqualObjects(results.facialTransformationMatrixes, emptyArray);
+  XCTAssertEqual(results.timestampInMilliseconds, 0);
+}
+
+@end

From 81612281ff3d32710768b32ed2fa622ddc15020d Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 6 Sep 2023 18:29:28 +0530
Subject: [PATCH 2/9] Added MPPFileInfo for iOS test resource files.

---
 mediapipe/tasks/ios/test/utils/BUILD          | 10 +++++
 .../ios/test/utils/sources/MPPFileInfo.h      | 42 +++++++++++++++++++
 .../ios/test/utils/sources/MPPFileInfo.m      | 33 +++++++++++++++
 3 files changed, 85 insertions(+)
 create mode 100644 mediapipe/tasks/ios/test/utils/BUILD
 create mode 100644 mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h
 create mode 100644 mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.m

diff --git a/mediapipe/tasks/ios/test/utils/BUILD b/mediapipe/tasks/ios/test/utils/BUILD
new file mode 100644
index 000000000..3d9e1b20f
--- /dev/null
+++ b/mediapipe/tasks/ios/test/utils/BUILD
@@ -0,0 +1,10 @@
+package(default_visibility = ["//mediapipe/tasks:internal"])
+
+licenses(["notice"])
+
+objc_library(
+    name = "MPPFileInfo",
+    srcs = ["sources/MPPFileInfo.m"],
+    hdrs = ["sources/MPPFileInfo.h"],
+    module_name = "MPPFileInfo",
+)

diff --git a/mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h b/mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h
new file mode 100644
index 000000000..666ed0ace
--- /dev/null
+++ b/mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h
@@ -0,0 +1,42 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface MPPFileInfo : NSObject
+
+/** The name of the file. */
+@property(nonatomic, readonly) NSString *name;
+
+/** The type of the file. */
+@property(nonatomic, readonly) NSString *type;
+
+/** The path to the file in the app bundle. */
+@property(nonatomic, readonly, nullable) NSString *path;
+
+/**
+ * Initializes an `MPPFileInfo` using the given name and type of file.
+ *
+ * @param name The name of the file.
+ * @param type The type of the file.
+ *
+ * @return The `MPPFileInfo` with the given name and type of file.
+ */
+- (instancetype)initWithName:(NSString *)name type:(NSString *)type;
+
+@end
+
+NS_ASSUME_NONNULL_END

diff --git a/mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.m b/mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.m
new file mode 100644
index 000000000..ef35f9bb8
--- /dev/null
+++ b/mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.m
@@ -0,0 +1,33 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h"
+
+@implementation MPPFileInfo
+
+- (instancetype)initWithName:(NSString *)name type:(NSString *)type {
+  self = [super init];
+  if (self) {
+    _name = name;
+    _type = type;
+  }
+
+  return self;
+}
+
+- (NSString *)path {
+  return [[NSBundle bundleForClass:self.class] pathForResource:self.name ofType:self.type];
+}
+
+@end

From 3b12cb57776cfa7d48e74f68afee2d5a9876001a Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 6 Sep 2023 18:31:20 +0530
Subject: [PATCH 3/9] Added iOS MPPMask test utils

---
 mediapipe/tasks/ios/test/vision/utils/BUILD   | 16 ++++-
 .../vision/utils/sources/MPPMask+TestUtils.h  | 42 +++++++++++++
 .../vision/utils/sources/MPPMask+TestUtils.m  | 63 +++++++++++++++++++
 3 files changed, 120 insertions(+), 1 deletion(-)
 create mode 100644 mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.h
 create mode 100644 mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.m

diff --git a/mediapipe/tasks/ios/test/vision/utils/BUILD b/mediapipe/tasks/ios/test/vision/utils/BUILD
index d117ad73d..0fca5c7e6 100644
--- a/mediapipe/tasks/ios/test/vision/utils/BUILD
+++ b/mediapipe/tasks/ios/test/vision/utils/BUILD
@@ -7,7 +7,21 @@ objc_library(
     srcs = ["sources/MPPImage+TestUtils.m"],
     hdrs = ["sources/MPPImage+TestUtils.h"],
     module_name = "MPPImageTestUtils",
-    deps = ["//mediapipe/tasks/ios/vision/core:MPPImage"],
+    deps = [
+        "//mediapipe/tasks/ios/vision/core:MPPImage",
+        "//mediapipe/tasks/ios/test/utils:MPPFileInfo"],
+)
+
+objc_library(
+    name = "MPPMaskTestUtils",
+    srcs = ["sources/MPPMask+TestUtils.m"],
+    hdrs = ["sources/MPPMask+TestUtils.h"],
+    module_name = "MPPMaskTestUtils",
+    deps = [
+        "//mediapipe/tasks/ios/vision/core:MPPMask",
+        "//mediapipe/tasks/ios/test/utils:MPPFileInfo",
+        "//third_party/apple_frameworks:UIKit",
+    ]
 )
 
 cc_library(

diff --git a/mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.h b/mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.h
new file mode 100644
index 000000000..066fcfa8a
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.h
@@ -0,0 +1,42 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Helper utility for initializing `MPPMask` for MediaPipe iOS vision library tests.
+ */
+@interface MPPMask (TestUtils)
+
+/**
+ * Creates an `MPPMask` of type `MPPMaskDataTypeUInt8` from the grayscale pixel data of an image
+ * loaded from a file with the given `MPPFileInfo`.
+ *
+ * @param fileInfo The file info specifying the name and type of the image file in the app bundle.
+ *
+ * @return An `MPPMask` with the pixel data of the loaded image, or `nil` if the image could not
+ * be loaded.
+ */
+- (nullable instancetype)initWithImageFileInfo:(MPPFileInfo *)fileInfo;
+
+@end
+
+NS_ASSUME_NONNULL_END

diff --git a/mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.m b/mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.m
new file mode 100644
index 000000000..d57be8383
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.m
@@ -0,0 +1,63 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.h"
+
+@implementation MPPMask (TestUtils)
+
+- (instancetype)initWithImageFileInfo:(MPPFileInfo *)fileInfo {
+  UIImage *image = [[UIImage alloc] initWithContentsOfFile:fileInfo.path];
+
+  if (!image.CGImage) {
+    return nil;
+  }
+
+  size_t width = CGImageGetWidth(image.CGImage);
+  size_t height = CGImageGetHeight(image.CGImage);
+
+  NSInteger bitsPerComponent = 8;
+
+  UInt8 *pixelData = NULL;
+
+  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
+
+  // For a grayscale image (single component) with no alpha, the bitmap info is
+  // `kCGImageAlphaNone` in combination with bytesPerRow being equal to width.
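+  // With 8 bits per component this yields one byte per pixel, so the context's backing
+  // buffer holds exactly width * height UInt8 values in row-major order, which is the
+  // layout expected for `MPPMaskDataTypeUInt8` data.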
+  CGContextRef context = CGBitmapContextCreate(nil, width, height, bitsPerComponent, width,
+                                               colorSpace, kCGImageAlphaNone);
+
+  if (!context) {
+    CGColorSpaceRelease(colorSpace);
+    return nil;
+  }
+
+  CGContextDrawImage(context, CGRectMake(0, 0, width, height), image.CGImage);
+  pixelData = (UInt8 *)CGBitmapContextGetData(context);
+
+  // A copy is needed to ensure that the pixel data outlives the `CGContextRelease` call.
+  // An alternative is to make the context and color space instance variables and release them in
+  // `dealloc()`. Since categories don't allow adding instance variables, we choose to copy rather
+  // than create a new custom class similar to `MPPMask` only for the tests.
+  MPPMask *mask = [[MPPMask alloc] initWithUInt8Data:pixelData
+                                               width:width
+                                              height:height
+                                          shouldCopy:YES];
+
+  CGColorSpaceRelease(colorSpace);
+  CGContextRelease(context);
+
+  return mask;
+}
+
+@end
\ No newline at end of file

From d3c83185ee158ff78476c45de1279cd83f70aa11 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 6 Sep 2023 18:31:45 +0530
Subject: [PATCH 4/9] Added methods to MPPImage test utils

---
 .../vision/utils/sources/MPPImage+TestUtils.h | 30 +++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h b/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h
index 8cd1c6a67..585677b3f 100644
--- a/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h
+++ b/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h
@@ -14,6 +14,7 @@
 
 #import <Foundation/Foundation.h>
 
+#import "mediapipe/tasks/ios/test/utils/sources/MPPFileInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
 
 NS_ASSUME_NONNULL_BEGIN
@@ -23,6 +24,34 @@ NS_ASSUME_NONNULL_BEGIN
  */
 @interface MPPImage (TestUtils)
 
+/**
+ * Loads an image from a file in an app bundle into an `MPPImage` object.
+ *
+ * @param fileInfo The file info specifying the name and extension of the image
+ * file in the bundle.
+ *
+ * @return An `MPPImage` object containing the loaded image. This method returns
+ * nil if it cannot load the image.
+ */
++ (MPPImage *)imageWithFileInfo:(MPPFileInfo *)fileInfo NS_SWIFT_NAME(image(withFileInfo:));
+
+/**
+ * Loads an image from a file in an app bundle into an `MPPImage` object with the specified
+ * orientation.
+ *
+ * @param fileInfo The file info specifying the name and extension of the image
+ * file in the bundle.
+ * @param orientation The display orientation to apply to the loaded image.
+ *
+ * @return An `MPPImage` object containing the loaded image. This method returns
+ * nil if it cannot load the image.
+ */
++ (MPPImage *)imageWithFileInfo:(MPPFileInfo *)fileInfo
+                    orientation:(UIImageOrientation)orientation
+    NS_SWIFT_NAME(image(withFileInfo:orientation:));
+
+// TODO: Remove after all tests are migrated
 /**
  * Loads an image from a file in an app bundle into a `MPPImage` object.
  *
@@ -39,6 +68,7 @@
                           ofType:(NSString *)type
     NS_SWIFT_NAME(imageFromBundle(class:filename:type:));
 
+// TODO: Remove after all tests are migrated
 /**
  * Loads an image from a file in an app bundle into a `MPPImage` object with the specified
  * orientation.
From 218bfc8f69ff41a7c59f187d39d32058cba8c3ec Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 6 Sep 2023 18:32:08 +0530
Subject: [PATCH 5/9] Added implementation of methods in MPPImage test utils

---
 .../vision/utils/sources/MPPImage+TestUtils.m | 24 +++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.m b/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.m
index 0b0ef9fbf..f922146fc 100644
--- a/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.m
+++ b/mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.m
@@ -14,6 +14,7 @@
 
 #import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
 
+// TODO: Remove this category after all tests are migrated to the new methods.
 @interface UIImage (FileUtils)
 
 + (nullable UIImage *)imageFromBundleWithClass:(Class)classObject
@@ -37,6 +38,28 @@
 
 @implementation MPPImage (TestUtils)
 
++ (MPPImage *)imageWithFileInfo:(MPPFileInfo *)fileInfo {
+  if (!fileInfo.path) return nil;
+
+  UIImage *image = [[UIImage alloc] initWithContentsOfFile:fileInfo.path];
+
+  if (!image) return nil;
+
+  return [[MPPImage alloc] initWithUIImage:image error:nil];
+}
+
++ (MPPImage *)imageWithFileInfo:(MPPFileInfo *)fileInfo
+                    orientation:(UIImageOrientation)orientation {
+  if (!fileInfo.path) return nil;
+
+  UIImage *image = [[UIImage alloc] initWithContentsOfFile:fileInfo.path];
+
+  if (!image) return nil;
+
+  return [[MPPImage alloc] initWithUIImage:image orientation:orientation error:nil];
+}
+
+// TODO: Remove after all tests are migrated
 + (nullable MPPImage *)imageFromBundleWithClass:(Class)classObject
                                        fileName:(NSString *)name
                                          ofType:(NSString *)type {
@@ -45,6 +68,7 @@
   return [[MPPImage alloc] initWithUIImage:image error:nil];
 }
 
+// TODO: Remove after all tests are migrated
 + (nullable MPPImage *)imageFromBundleWithClass:(Class)classObject
                                        fileName:(NSString *)name
                                          ofType:(NSString *)type

From b1915d8801a551b391c32e9630a8a925fe54e9ad Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 6 Sep 2023 18:32:34 +0530
Subject: [PATCH 6/9] Fixed typo in MPPImageSegmenterResultHelpers

---
 .../utils/sources/MPPImageSegmenterResult+Helpers.mm | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm
index d6e3b1be8..885df734d 100644
--- a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm
+++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm
@@ -52,7 +52,7 @@ using ::mediapipe::Packet;
   }
 
   if (categoryMaskPacket.ValidateAsType<Image>().ok()) {
-    const Image &cppCategoryMask = confidenceMasksPacket.Get<Image>();
+    const Image &cppCategoryMask = categoryMaskPacket.Get<Image>();
     categoryMask = [[MPPMask alloc]
         initWithUInt8Data:(UInt8 *)cppCategoryMask.GetImageFrameSharedPtr().get()->PixelData()
                     width:cppCategoryMask.width()

From 7707bd4061b581c0f4b7977b498f7cd39a9631ea Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 6 Sep 2023 18:33:12 +0530
Subject: [PATCH 7/9] Added methods to MPPImageSegmenterTests

---
 .../ios/test/vision/image_segmenter/BUILD     |  10 +-
 .../image_segmenter/MPPImageSegmenterTests.mm | 721 +++++------------
 2 files changed, 191 insertions(+), 540 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD
index 03c515529..1df56336e 100644
--- a/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD
+++ b/mediapipe/tasks/ios/test/vision/image_segmenter/BUILD
@@ -39,18 +39,10 @@ objc_library(
         "//mediapipe/tasks/testdata/vision:test_protos",
     ],
     deps = [
-        # "//mediapipe/framework/formats:classification_cc_proto",
-        # "//mediapipe/framework/formats:landmark_cc_proto",
-        # "//mediapipe/framework/formats:matrix_data_cc_proto",
-        # "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto",
-        # "//mediapipe/tasks/ios/common:MPPCommon",
-        # "//mediapipe/tasks/ios/components/containers/utils:MPPClassificationResultHelpers",
-        # "//mediapipe/tasks/ios/components/containers/utils:MPPDetectionHelpers",
-        # "//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers",
         "//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils",
+        "//mediapipe/tasks/ios/test/vision/utils:MPPMaskTestUtils",
         "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenter",
         "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenterResult",
-        "//third_party/apple_frameworks:UIKit",
     ] + select({
         "//third_party:opencv_ios_sim_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"],
         "//third_party:opencv_ios_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"],
         "//third_party:opencv_ios_x86_64_source_build": ["@ios_opencv_source//:opencv_xcframework"],
        "//conditions:default": ["@ios_opencv//:OpencvFramework"],
     }),

diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm b/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
index 1a1dcfc8a..9df89ad2a 100644
--- a/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
+++ b/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
@@ -13,563 +13,222 @@
 // limitations under the License.
 
 #import <Foundation/Foundation.h>
-#import <UIKit/UIKit.h>
 #import <XCTest/XCTest.h>
 
-// #include "mediapipe/framework/formats/classification.pb.h"
-// #include "mediapipe/framework/formats/landmark.pb.h"
-// #include "mediapipe/framework/formats/matrix_data.pb.h"
-// #include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h"
-// #import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
-// #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.h"
-// #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPDetection+Helpers.h"
-// #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
 #import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
-// #include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"
+#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPMask+TestUtils.h"
 #import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h"
 #import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h"
 
-// using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
-// using ClassificationListProto = ::mediapipe::ClassificationList;
-// using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry;
-// using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
-
-static NSString *const kPbFileExtension = @"pbtxt";
-
-typedef NSDictionary<NSString *, id> ResourceFileInfo;
-
-// static ResourceFileInfo *const kPortraitImage =
-//     @{@"name" : @"portrait", @"type" : @"jpg", @"orientation" : @(UIImageOrientationUp)};
-// static ResourceFileInfo *const kPortraitRotatedImage =
-//     @{@"name" : @"portrait_rotated", @"type" : @"jpg", @"orientation" : @(UIImageOrientationRight)};
-static ResourceFileInfo *const
kCatImage = @{@"name" : @"cat", @"type" : @"jpg"};
-// static ResourceFileInfo *const kPortraitExpectedLandmarksName =
-//     @{@"name" : @"portrait_expected_face_landmarks", @"type" : kPbFileExtension};
-// static ResourceFileInfo *const kPortraitExpectedBlendshapesName =
-//     @{@"name" : @"portrait_expected_blendshapes", @"type" : kPbFileExtension};
-// static ResourceFileInfo *const kPortraitExpectedGeometryName =
-//     @{@"name" : @"portrait_expected_face_geometry", @"type" : kPbFileExtension};
-static NSString *const kImageSegmenterModelName = @"deeplabv3";
-// static NSString *const kFaceLandmarkerWithBlendshapesModelName =
-//     @"face_landmarker_v2_with_blendshapes";
+static MPPFileInfo *const kCatImageFileInfo = [[MPPFileInfo alloc] initWithName:@"cat"
+                                                                           type:@"jpg"];
+static MPPFileInfo *const kCatGoldenImageFileInfo = [[MPPFileInfo alloc] initWithName:@"cat_mask"
+                                                                                 type:@"jpg"];
+static MPPFileInfo *const kSegmentationImageFileInfo =
+    [[MPPFileInfo alloc] initWithName:@"segmentation_input_rotation0" type:@"jpg"];
+static MPPFileInfo *const kSegmentationGoldenImageFileInfo =
+    [[MPPFileInfo alloc] initWithName:@"segmentation_golden_rotation0" type:@"png"];
+static MPPFileInfo *const kImageSegmenterModel = [[MPPFileInfo alloc] initWithName:@"deeplabv3"
+                                                                               type:@"tflite"];
 static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
-static NSString *const kLiveStreamTestsDictFaceLandmarkerKey = @"image_segmenter";
-static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
+constexpr float kSimilarityThreshold = 0.96f;
+constexpr NSInteger kMagnificationFactor = 10;
-constexpr float kLandmarkErrorThreshold = 0.03f;
-constexpr float kBlendshapesErrorThreshold = 0.1f;
-constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f;
-
-#define AssertEqualErrors(error, expectedError) \
-  XCTAssertNotNil(error); \
-  XCTAssertEqualObjects(error.domain, expectedError.domain); \
-  XCTAssertEqual(error.code, expectedError.code); \
-  XCTAssertEqualObjects(error.localizedDescription, expectedError.localizedDescription)
-@interface MPPImageSegmenterTests : XCTestCase {
-  NSDictionary *_liveStreamSucceedsTestDict;
-  NSDictionary *_outOfOrderTimestampTestDict;
-}
+// Computes the sum of all elements of the given mask.
+double sum(const float *mask, size_t size) {
+  double sum = 0.0;
+  for (int i = 0; i < size; i++) {
+    sum += mask[i];
+  }
+  return sum;
+}
+
+// Returns a newly malloc'd element-wise product of the two masks; the caller must free it.
+float *multiply(const float *mask1, const float *mask2, size_t size) {
+  float *multipliedMask = (float *)malloc(size * sizeof(float));
+  if (!multipliedMask) {
+    exit(-1);
+  }
+  for (int i = 0; i < size; i++) {
+    multipliedMask[i] = mask1[i] * mask2[i];
+  }
+
+  return multipliedMask;
+}
+
+// Computes the soft intersection-over-union of two confidence masks:
+// softIOU(m1, m2) = sum(m1 * m2) / (sum(m1 * m1) + sum(m2 * m2) - sum(m1 * m2)).
+double softIOU(const float *mask1, const float *mask2, size_t size) {
+  float *interSectionVector = multiply(mask1, mask2, size);
+  double interSectionSum = sum(interSectionVector, size);
+  free(interSectionVector);
+
+  float *m1m1Vector = multiply(mask1, mask1, size);
+  double m1m1 = sum(m1m1Vector, size);
+  free(m1m1Vector);
+
+  float *m2m2Vector = multiply(mask2, mask2, size);
+  double m2m2 = sum(m2m2Vector, size);
+  free(m2m2Vector);
+
+  double unionSum = m1m1 + m2m2 - interSectionSum;
+
+  return unionSum > 0.0 ?
+
+@interface MPPImageSegmenterTests : XCTestCase
+
 @end

 @implementation MPPImageSegmenterTests

 #pragma mark General Tests

-- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo {
-  UIImageOrientation orientation = (UIImageOrientation)[fileInfo[@"orientation"] intValue];
-  MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPImageSegmenterTests class]
-                                              fileName:fileInfo[@"name"]
-                                                ofType:fileInfo[@"type"]
-                                           orientation:orientation];
-  XCTAssertNotNil(image);
-  return image;
+- (void)setUp {
+  // When expected and actual mask sizes are not equal, iterating through mask data results in a
+  // segmentation fault. Setting this property to `NO` prevents each test case from executing the
+  // remaining flow after a failure. Since expected and actual mask sizes are compared before
+  // iterating through them, this prevents any illegal memory access.
+  self.continueAfterFailure = NO;
 }

 + (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension {
   NSString *filePath =
       [[NSBundle bundleForClass:[MPPImageSegmenterTests class]] pathForResource:fileName
                                                                           ofType:extension];
   return filePath;
 }

-// - (void)testCreateImageSegmenterWithMissingModelPathFails {
-//   NSString *modelPath = [MPPFaceLandmarkerTests filePathWithName:@"" extension:@""];
-
-//   NSError *error = nil;
-//   MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithModelPath:modelPath
-//                                                                              error:&error];
-//   XCTAssertNil(faceLandmarker);
-
-//   NSError *expectedError = [NSError
-//       errorWithDomain:kExpectedErrorDomain
-//                  code:MPPTasksErrorCodeInvalidArgumentError
-//              userInfo:@{
-//                NSLocalizedDescriptionKey :
-//                    @"INVALID_ARGUMENT: ExternalFile must specify at least one of 'file_content', "
-//                    @"'file_name', 'file_pointer_meta' or 'file_descriptor_meta'."
-//              }];
-//   AssertEqualErrors(error, expectedError);
-// }
-
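The deleted commented-out test above leaves a gap: nothing checks segmenter creation with a missing model path. An image-segmenter version could look roughly like the sketch below (editor's sketch, not part of the patch; it assumes MPPImageSegmenter exposes the same initWithModelPath:error: convenience initializer as the other iOS vision tasks, and the expected error contents are carried over unverified from the face landmarker test it replaces):

// Editor's sketch, not committed code.
- (void)testCreateImageSegmenterWithMissingModelPathFails {
  NSString *modelPath = [MPPImageSegmenterTests filePathWithName:@"" extension:@""];

  NSError *error = nil;
  MPPImageSegmenter *imageSegmenter = [[MPPImageSegmenter alloc] initWithModelPath:modelPath
                                                                              error:&error];
  XCTAssertNil(imageSegmenter);
  XCTAssertNotNil(error);
  XCTAssertEqualObjects(error.domain, kExpectedErrorDomain);
}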
 #pragma mark Image Mode Tests

-- (void)testDetectWithImageModeAndPotraitSucceeds {
-  NSString *modelPath = [MPPImageSegmenterTests filePathWithName:kImageSegmenterModelName
-                                                       extension:@"tflite"];
-  MPPImageSegmenter *imageSegmenter = [[MPPImageSegmenter alloc] initWithModelPath:modelPath
-                                                                              error:nil];
+- (void)testSegmentWithCategoryMaskSucceeds {
+  MPPImageSegmenterOptions *options =
+      [self imageSegmenterOptionsWithModelFileInfo:kImageSegmenterModel];
+  options.shouldOutputConfidenceMasks = NO;
+  options.shouldOutputCategoryMask = YES;

-  MPPImage *image = [self imageWithFileInfo:kCatImage];
-  MPPImageSegmenterResult *result = [imageSegmenter segmentImage:image error:nil];
-  // NSArray *expectedLandmarks =
-  //     [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName];
-  // [self assertResultsOfDetectInImageWithFileInfo:kPortraitImage
-  //                            usingFaceLandmarker:faceLandmarker
-  //                      containsExpectedLandmarks:expectedLandmarks
-  //                            expectedBlendshapes:NULL
-  //                   expectedTransformationMatrix:NULL];
+  MPPImageSegmenter *imageSegmenter = [self createImageSegmenterWithOptionsSucceeds:options];
+
+  [self assertResultsOfSegmentImageWithFileInfo:kSegmentationImageFileInfo
+                                            usingImageSegmenter:imageSegmenter
+      approximatelyEqualsExpectedCategoryMaskImageWithFileInfo:kSegmentationGoldenImageFileInfo
+                                      shouldHaveConfidenceMasks:NO];
+}
+
+- (void)testSegmentWithConfidenceMaskSucceeds {
+  MPPImageSegmenterOptions *options =
+      [self imageSegmenterOptionsWithModelFileInfo:kImageSegmenterModel];
+
+  MPPImageSegmenter *imageSegmenter = [self createImageSegmenterWithOptionsSucceeds:options];
+
+  [self assertResultsOfSegmentImageWithFileInfo:kCatImageFileInfo
+                                             usingImageSegmenter:imageSegmenter
+      approximatelyEqualsExpectedConfidenceMaskImageWithFileInfo:kCatGoldenImageFileInfo
+                                                        atIndex:8
+                                         shouldHaveCategoryMask:NO];
+}
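One non-obvious constant in the test above: `atIndex:8`. Assuming the standard PASCAL VOC 2012 label map that the deeplabv3 model is trained on, index 8 is the "cat" class, which is why that confidence mask is compared against the cat golden image:

// PASCAL VOC 2012 label indices (assumed; listed for reference):
//   0:background  1:aeroplane  2:bicycle  3:bird  4:boat  5:bottle  6:bus
//   7:car  8:cat  9:chair  10:cow  11:diningtable  12:dog  13:horse
//   14:motorbike  15:person  16:pottedplant  17:sheep  18:sofa  19:train
//   20:tvmonitor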
+
+#pragma mark - Image Segmenter Initializers
+
+- (MPPImageSegmenterOptions *)imageSegmenterOptionsWithModelFileInfo:(MPPFileInfo *)fileInfo {
+  MPPImageSegmenterOptions *options = [[MPPImageSegmenterOptions alloc] init];
+  options.baseOptions.modelAssetPath = fileInfo.path;
+  return options;
+}
+
+- (MPPImageSegmenter *)createImageSegmenterWithOptionsSucceeds:(MPPImageSegmenterOptions *)options {
+  NSError *error;
+  MPPImageSegmenter *imageSegmenter = [[MPPImageSegmenter alloc] initWithOptions:options
+                                                                            error:&error];
+  XCTAssertNotNil(imageSegmenter);
+  XCTAssertNil(error);
+
+  return imageSegmenter;
+}
+
+#pragma mark Assert Segmenter Results
+
+- (void)assertResultsOfSegmentImageWithFileInfo:(MPPFileInfo *)imageFileInfo
+                                      usingImageSegmenter:(MPPImageSegmenter *)imageSegmenter
+    approximatelyEqualsExpectedCategoryMaskImageWithFileInfo:
+        (MPPFileInfo *)expectedCategoryMaskFileInfo
+                                shouldHaveConfidenceMasks:(BOOL)shouldHaveConfidenceMasks {
+  MPPImageSegmenterResult *result = [self segmentImageWithFileInfo:imageFileInfo
+                                               usingImageSegmenter:imageSegmenter];
+
+  XCTAssertNotNil(result.categoryMask);
+  shouldHaveConfidenceMasks ? XCTAssertNotNil(result.confidenceMasks)
+                            : XCTAssertNil(result.confidenceMasks);
+
+  [self assertCategoryMask:result.categoryMask
+      approximatelyEqualsExpectedCategoryMaskImageWithFileInfo:expectedCategoryMaskFileInfo];
+}
+
+- (void)assertResultsOfSegmentImageWithFileInfo:(MPPFileInfo *)imageFileInfo
+                                        usingImageSegmenter:(MPPImageSegmenter *)imageSegmenter
+    approximatelyEqualsExpectedConfidenceMaskImageWithFileInfo:
+        (MPPFileInfo *)expectedConfidenceMaskFileInfo
+                                                    atIndex:(NSInteger)index
+                                     shouldHaveCategoryMask:(BOOL)shouldHaveCategoryMask {
+  MPPImageSegmenterResult *result = [self segmentImageWithFileInfo:imageFileInfo
+                                               usingImageSegmenter:imageSegmenter];
+
+  XCTAssertNotNil(result.confidenceMasks);
+  shouldHaveCategoryMask ? XCTAssertNotNil(result.categoryMask) : XCTAssertNil(result.categoryMask);
+
+  XCTAssertLessThan(index, result.confidenceMasks.count);
+
+  [self assertConfidenceMask:result.confidenceMasks[index]
+      approximatelyEqualsExpectedConfidenceMaskImageWithFileInfo:expectedConfidenceMaskFileInfo];
+}
+
+- (MPPImageSegmenterResult *)segmentImageWithFileInfo:(MPPFileInfo *)fileInfo
+                                  usingImageSegmenter:(MPPImageSegmenter *)imageSegmenter {
+  MPPImage *image = [MPPImage imageWithFileInfo:fileInfo];
+  XCTAssertNotNil(image);
+
+  NSError *error;
+  MPPImageSegmenterResult *result = [imageSegmenter segmentImage:image error:&error];
+  XCTAssertNil(error);
+  XCTAssertNotNil(result);
+
+  return result;
+}
+
+- (void)assertCategoryMask:(MPPMask *)categoryMask
+    approximatelyEqualsExpectedCategoryMaskImageWithFileInfo:
+        (MPPFileInfo *)expectedCategoryMaskImageFileInfo {
+  MPPMask *expectedCategoryMask =
+      [[MPPMask alloc] initWithImageFileInfo:expectedCategoryMaskImageFileInfo];
+
+  XCTAssertEqual(categoryMask.width, expectedCategoryMask.width);
+  XCTAssertEqual(categoryMask.height, expectedCategoryMask.height);
+
+  size_t maskSize = categoryMask.width * categoryMask.height;
+
+  const UInt8 *categoryMaskPixelData = categoryMask.uint8Data;
+  const UInt8 *expectedCategoryMaskPixelData = expectedCategoryMask.uint8Data;
+
+  NSInteger consistentPixels = 0;
+
+  for (int i = 0; i < maskSize; i++) {
+    consistentPixels +=
+        categoryMaskPixelData[i] * kMagnificationFactor == expectedCategoryMaskPixelData[i] ? 1 : 0;
+  }
+
+  XCTAssertGreaterThan((float)consistentPixels / (float)maskSize, kSimilarityThreshold);
+}
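A note on `kMagnificationFactor` in the loop above: the golden file appears to store each pixel's category index scaled up by 10 (e.g. class 2 is saved as gray value 20) so the mask is visible to the eye; multiplying the raw category index by 10 puts both masks in the same value range before the per-pixel comparison. In other words:

// Assumed relationship between the raw mask and the golden image:
//   golden_pixel[i] == category_index[i] * kMagnificationFactor
// With 21 VOC classes the largest stored value is 200, which still fits in a
// UInt8 pixel.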
+
+- (void)assertConfidenceMask:(MPPMask *)confidenceMask
+    approximatelyEqualsExpectedConfidenceMaskImageWithFileInfo:
+        (MPPFileInfo *)expectedConfidenceMaskImageFileInfo {
+  MPPMask *expectedConfidenceMask =
+      [[MPPMask alloc] initWithImageFileInfo:expectedConfidenceMaskImageFileInfo];
+
+  XCTAssertEqual(confidenceMask.width, expectedConfidenceMask.width);
+  XCTAssertEqual(confidenceMask.height, expectedConfidenceMask.height);
+
+  size_t maskSize = confidenceMask.width * confidenceMask.height;
+
+  XCTAssertGreaterThan(
+      softIOU(confidenceMask.float32Data, expectedConfidenceMask.float32Data, maskSize),
+      kSimilarityThreshold);
 }

 @end
-
-// - (void)testDetectWithImageModeAndPotraitAndFacialTransformationMatrixesSucceeds {
-//   MPPFaceLandmarkerOptions *options =
-//       [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName];
-//   options.outputFacialTransformationMatrixes = YES;
-//   MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil];
-
-//   NSArray *expectedLandmarks =
-//       [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName];
-//   MPPTransformMatrix *expectedTransformationMatrix = [MPPFaceLandmarkerTests
-//       expectedTransformationMatrixFromFileInfo:kPortraitExpectedGeometryName];
-//   [self assertResultsOfDetectInImageWithFileInfo:kPortraitImage
-//                              usingFaceLandmarker:faceLandmarker
-//                        containsExpectedLandmarks:expectedLandmarks
-//                              expectedBlendshapes:NULL
-//                     expectedTransformationMatrix:expectedTransformationMatrix];
-// }
-
-// - (void)testDetectWithImageModeAndNoFaceSucceeds {
-//   NSString *modelPath = [MPPFaceLandmarkerTests filePathWithName:kFaceLandmarkerModelName
-//                                                        extension:@"task"];
-//   MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithModelPath:modelPath
-//                                                                               error:nil];
-//   XCTAssertNotNil(faceLandmarker);
-
-//   NSError *error;
-//   MPPImage *mppImage = [self imageWithFileInfo:kCatImage];
-//   MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInImage:mppImage
-//                                                                            error:&error];
-//   XCTAssertNil(error);
-//   XCTAssertNotNil(faceLandmarkerResult);
-//   XCTAssertEqualObjects(faceLandmarkerResult.faceLandmarks, [NSArray array]);
-//   XCTAssertEqualObjects(faceLandmarkerResult.faceBlendshapes, [NSArray array]);
-//   XCTAssertEqualObjects(faceLandmarkerResult.facialTransformationMatrixes, [NSArray array]);
-// }
-
-// #pragma mark Video Mode Tests
-
-// - (void)testDetectWithVideoModeAndPotraitSucceeds {
-//   MPPFaceLandmarkerOptions *options =
-//       [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName];
-//   options.runningMode = MPPRunningModeVideo;
-//   MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil];
-
-//   MPPImage *image = [self imageWithFileInfo:kPortraitImage];
-//   NSArray *expectedLandmarks =
-//       [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName];
-//   for (int i = 0; i < 3; i++) {
-//     MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInVideoFrame:image
-//                                                                 timestampInMilliseconds:i
-//                                                                                   error:nil];
-//     [self assertFaceLandmarkerResult:faceLandmarkerResult
-//             containsExpectedLandmarks:expectedLandmarks
-//                   expectedBlendshapes:NULL
-//          expectedTransformationMatrix:NULL];
-//   }
-// }
-
-// #pragma mark Live Stream Mode Tests
-
-// - (void)testDetectWithLiveStreamModeAndPotraitSucceeds {
-//   NSInteger iterationCount = 100;
-
-//   // Because of
flow limiting, the callback might be invoked fewer than `iterationCount` times. An -// // normal expectation will fail if expectation.fulfill() is not called -// // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will -// // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. -// // Since it is not possible to predict how many times the expectation is supposed to be -// // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and -// // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= -// // `iterationCount` times. -// XCTestExpectation *expectation = [[XCTestExpectation alloc] -// initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; -// expectation.expectedFulfillmentCount = iterationCount + 1; -// expectation.inverted = YES; - -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; -// options.runningMode = MPPRunningModeLiveStream; -// options.faceLandmarkerLiveStreamDelegate = self; - -// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; -// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; - -// _liveStreamSucceedsTestDict = @{ -// kLiveStreamTestsDictFaceLandmarkerKey : faceLandmarker, -// kLiveStreamTestsDictExpectationKey : expectation -// }; - -// for (int i = 0; i < iterationCount; i++) { -// XCTAssertTrue([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:i error:nil]); -// } - -// NSTimeInterval timeout = 0.5f; -// [self waitForExpectations:@[ expectation ] timeout:timeout]; -// } - -// - (void)testDetectWithOutOfOrderTimestampsAndLiveStreamModeFails { -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; -// options.runningMode = MPPRunningModeLiveStream; -// options.faceLandmarkerLiveStreamDelegate = self; - -// XCTestExpectation *expectation = [[XCTestExpectation alloc] -// initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; -// expectation.expectedFulfillmentCount = 1; - -// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; -// _liveStreamSucceedsTestDict = @{ -// kLiveStreamTestsDictFaceLandmarkerKey : faceLandmarker, -// kLiveStreamTestsDictExpectationKey : expectation -// }; - -// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; -// XCTAssertTrue([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]); - -// NSError *error; -// XCTAssertFalse([faceLandmarker detectAsyncInImage:image timestampInMilliseconds:0 error:&error]); - -// NSError *expectedError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : -// @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing." 
-// }]; -// AssertEqualErrors(error, expectedError); - -// NSTimeInterval timeout = 0.5f; -// [self waitForExpectations:@[ expectation ] timeout:timeout]; -// } - -// #pragma mark Running Mode Tests - -// - (void)testCreateFaceLandmarkerFailsWithDelegateInNonLiveStreamMode { -// MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; -// for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; - -// options.runningMode = runningModesToTest[i]; -// options.faceLandmarkerLiveStreamDelegate = self; - -// [self -// assertCreateFaceLandmarkerWithOptions:options -// failsWithExpectedError: -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : -// @"The vision task is in image or video mode. The " -// @"delegate must not be set in the task's options." -// }]]; -// } -// } - -// - (void)testCreateFaceLandmarkerFailsWithMissingDelegateInLiveStreamMode { -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; -// options.runningMode = MPPRunningModeLiveStream; - -// [self assertCreateFaceLandmarkerWithOptions:options -// failsWithExpectedError: -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : -// @"The vision task is in live stream mode. An " -// @"object must be set as the delegate of the task " -// @"in its options to ensure asynchronous delivery " -// @"of results." -// }]]; -// } - -// - (void)testDetectFailsWithCallingWrongAPIInImageMode { -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; -// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; - -// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; - -// NSError *liveStreamAPICallError; -// XCTAssertFalse([faceLandmarker detectAsyncInImage:image -// timestampInMilliseconds:0 -// error:&liveStreamAPICallError]); - -// NSError *expectedLiveStreamAPICallError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : @"The vision task is not initialized with live " -// @"stream mode. Current Running Mode: Image" -// }]; -// AssertEqualErrors(liveStreamAPICallError, expectedLiveStreamAPICallError); - -// NSError *videoAPICallError; -// XCTAssertFalse([faceLandmarker detectInVideoFrame:image -// timestampInMilliseconds:0 -// error:&videoAPICallError]); - -// NSError *expectedVideoAPICallError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : @"The vision task is not initialized with " -// @"video mode. 
Current Running Mode: Image" -// }]; -// AssertEqualErrors(videoAPICallError, expectedVideoAPICallError); -// } - -// - (void)testDetectFailsWithCallingWrongAPIInVideoMode { -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; -// options.runningMode = MPPRunningModeVideo; - -// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; - -// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; -// NSError *liveStreamAPICallError; -// XCTAssertFalse([faceLandmarker detectAsyncInImage:image -// timestampInMilliseconds:0 -// error:&liveStreamAPICallError]); - -// NSError *expectedLiveStreamAPICallError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : @"The vision task is not initialized with live " -// @"stream mode. Current Running Mode: Video" -// }]; -// AssertEqualErrors(liveStreamAPICallError, expectedLiveStreamAPICallError); - -// NSError *imageAPICallError; -// XCTAssertFalse([faceLandmarker detectInImage:image error:&imageAPICallError]); - -// NSError *expectedImageAPICallError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : @"The vision task is not initialized with " -// @"image mode. Current Running Mode: Video" -// }]; -// AssertEqualErrors(imageAPICallError, expectedImageAPICallError); -// } - -// - (void)testDetectFailsWithCallingWrongAPIInLiveStreamMode { -// MPPFaceLandmarkerOptions *options = -// [self faceLandmarkerOptionsWithModelName:kFaceLandmarkerModelName]; -// options.runningMode = MPPRunningModeLiveStream; -// options.faceLandmarkerLiveStreamDelegate = self; -// MPPFaceLandmarker *faceLandmarker = [[MPPFaceLandmarker alloc] initWithOptions:options error:nil]; - -// MPPImage *image = [self imageWithFileInfo:kPortraitImage]; - -// NSError *imageAPICallError; -// XCTAssertFalse([faceLandmarker detectInImage:image error:&imageAPICallError]); - -// NSError *expectedImageAPICallError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : @"The vision task is not initialized with " -// @"image mode. Current Running Mode: Live Stream" -// }]; -// AssertEqualErrors(imageAPICallError, expectedImageAPICallError); - -// NSError *videoAPICallError; -// XCTAssertFalse([faceLandmarker detectInVideoFrame:image -// timestampInMilliseconds:0 -// error:&videoAPICallError]); - -// NSError *expectedVideoAPICallError = -// [NSError errorWithDomain:kExpectedErrorDomain -// code:MPPTasksErrorCodeInvalidArgumentError -// userInfo:@{ -// NSLocalizedDescriptionKey : @"The vision task is not initialized with " -// @"video mode. 
Current Running Mode: Live Stream" -// }]; -// AssertEqualErrors(videoAPICallError, expectedVideoAPICallError); -// } - -// #pragma mark MPPFaceLandmarkerLiveStreamDelegate Methods -// - (void)faceLandmarker:(MPPFaceLandmarker *)faceLandmarker -// didFinishDetectionWithResult:(MPPFaceLandmarkerResult *)faceLandmarkerResult -// timestampInMilliseconds:(NSInteger)timestampInMilliseconds -// error:(NSError *)error { -// NSArray *expectedLandmarks = -// [MPPFaceLandmarkerTests expectedLandmarksFromFileInfo:kPortraitExpectedLandmarksName]; -// [self assertFaceLandmarkerResult:faceLandmarkerResult -// containsExpectedLandmarks:expectedLandmarks -// expectedBlendshapes:NULL -// expectedTransformationMatrix:NULL]; - -// if (faceLandmarker == _outOfOrderTimestampTestDict[kLiveStreamTestsDictFaceLandmarkerKey]) { -// [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; -// } else if (faceLandmarker == _liveStreamSucceedsTestDict[kLiveStreamTestsDictFaceLandmarkerKey]) { -// [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; -// } -// } - -// + (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension { -// NSString *filePath = -// [[NSBundle bundleForClass:[MPPFaceLandmarkerTests class]] pathForResource:fileName -// ofType:extension]; -// return filePath; -// } - -// + (NSArray *)expectedLandmarksFromFileInfo:(NSDictionary *)fileInfo { -// NSString *filePath = [self filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; -// NormalizedLandmarkListProto proto; -// if (!get_proto_from_pbtxt([filePath UTF8String], proto).ok()) { -// return nil; -// } -// NSMutableArray *landmarks = -// [NSMutableArray arrayWithCapacity:(NSUInteger)proto.landmark_size()]; -// for (const auto &landmarkProto : proto.landmark()) { -// [landmarks addObject:[MPPNormalizedLandmark normalizedLandmarkWithProto:landmarkProto]]; -// } -// return landmarks; -// } - -// + (MPPClassifications *)expectedBlendshapesFromFileInfo:(NSDictionary *)fileInfo { -// NSString *filePath = [self filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; -// ClassificationListProto proto; -// if (!get_proto_from_pbtxt([filePath UTF8String], proto).ok()) { -// return nil; -// } -// return [MPPClassifications classificationsWithClassificationListProto:proto -// headIndex:0 -// headName:[NSString string]]; -// } - -// + (MPPTransformMatrix *)expectedTransformationMatrixFromFileInfo:(NSDictionary *)fileInfo { -// NSString *filePath = [self filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; -// FaceGeometryProto proto; -// if (!get_proto_from_pbtxt([filePath UTF8String], proto).ok()) { -// return nil; -// } -// return [[MPPTransformMatrix alloc] initWithData:proto.pose_transform_matrix().packed_data().data() -// rows:proto.pose_transform_matrix().rows() -// columns:proto.pose_transform_matrix().cols()]; -// } - -// - (void)assertFaceLandmarkerResult:(MPPFaceLandmarkerResult *)faceLandmarkerResult -// containsExpectedLandmarks:(NSArray *)expectedLandmarks -// expectedBlendshapes:(nullable MPPClassifications *)expectedBlendshapes -// expectedTransformationMatrix:(nullable MPPTransformMatrix *)expectedTransformationMatrix { -// NSArray *landmarks = faceLandmarkerResult.faceLandmarks[0]; -// XCTAssertEqual(landmarks.count, expectedLandmarks.count); -// for (int i = 0; i < landmarks.count; ++i) { -// XCTAssertEqualWithAccuracy(landmarks[i].x, expectedLandmarks[i].x, kLandmarkErrorThreshold, -// @"index i = %d", i); -// 
XCTAssertEqualWithAccuracy(landmarks[i].y, expectedLandmarks[i].y, kLandmarkErrorThreshold, -// @"index i = %d", i); -// } - -// if (expectedBlendshapes == NULL) { -// XCTAssertEqualObjects(faceLandmarkerResult.faceBlendshapes, [NSArray array]); -// } else { -// MPPClassifications *blendshapes = faceLandmarkerResult.faceBlendshapes[0]; -// NSArray *actualCategories = blendshapes.categories; -// NSArray *expectedCategories = expectedBlendshapes.categories; -// XCTAssertEqual(actualCategories.count, expectedCategories.count); -// for (int i = 0; i < actualCategories.count; ++i) { -// XCTAssertEqual(actualCategories[i].index, expectedCategories[i].index, @"index i = %d", i); -// XCTAssertEqualWithAccuracy(actualCategories[i].score, expectedCategories[i].score, -// kBlendshapesErrorThreshold, @"index i = %d", i); -// XCTAssertEqualObjects(actualCategories[i].categoryName, expectedCategories[i].categoryName, -// @"index i = %d", i); -// XCTAssertEqualObjects(actualCategories[i].displayName, expectedCategories[i].displayName, -// @"index i = %d", i); -// } -// } - -// if (expectedTransformationMatrix == NULL) { -// XCTAssertEqualObjects(faceLandmarkerResult.facialTransformationMatrixes, [NSArray array]); -// } else { -// MPPTransformMatrix *actualTransformationMatrix = -// faceLandmarkerResult.facialTransformationMatrixes[0]; -// XCTAssertEqual(actualTransformationMatrix.rows, expectedTransformationMatrix.rows); -// XCTAssertEqual(actualTransformationMatrix.columns, expectedTransformationMatrix.columns); -// for (int i = 0; i < actualTransformationMatrix.rows * actualTransformationMatrix.columns; ++i) { -// XCTAssertEqualWithAccuracy(actualTransformationMatrix.data[i], -// expectedTransformationMatrix.data[i], -// kFacialTransformationMatrixErrorThreshold, @"index i = %d", i); -// } -// } -// } - -// #pragma mark Face Landmarker Initializers - -// - (MPPFaceLandmarkerOptions *)faceLandmarkerOptionsWithModelName:(NSString *)modelName { -// NSString *modelPath = [MPPFaceLandmarkerTests filePathWithName:modelName extension:@"task"]; -// MPPFaceLandmarkerOptions *faceLandmarkerOptions = [[MPPFaceLandmarkerOptions alloc] init]; -// faceLandmarkerOptions.baseOptions.modelAssetPath = modelPath; -// return faceLandmarkerOptions; -// } - -// - (void)assertCreateFaceLandmarkerWithOptions:(MPPFaceLandmarkerOptions *)faceLandmarkerOptions -// failsWithExpectedError:(NSError *)expectedError { -// NSError *error = nil; -// MPPFaceLandmarker *faceLandmarker = -// [[MPPFaceLandmarker alloc] initWithOptions:faceLandmarkerOptions error:&error]; -// XCTAssertNil(faceLandmarker); -// AssertEqualErrors(error, expectedError); -// } - -// #pragma mark Assert Detection Results - -// - (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { -// UIImageOrientation orientation = (UIImageOrientation)[fileInfo[@"orientation"] intValue]; -// MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPFaceLandmarkerTests class] -// fileName:fileInfo[@"name"] -// ofType:fileInfo[@"type"] -// orientation:orientation]; -// XCTAssertNotNil(image); -// return image; -// } - -// - (void)assertResultsOfDetectInImageWithFileInfo:(ResourceFileInfo *)fileInfo -// usingFaceLandmarker:(MPPFaceLandmarker *)faceLandmarker -// containsExpectedLandmarks: -// (NSArray *)expectedLandmarks -// expectedBlendshapes:(nullable MPPClassifications *)expectedBlendshapes -// expectedTransformationMatrix: -// (nullable MPPTransformMatrix *)expectedTransformationMatrix { -// MPPImage *mppImage = [self imageWithFileInfo:fileInfo]; - -// NSError *error; 
-// MPPFaceLandmarkerResult *faceLandmarkerResult = [faceLandmarker detectInImage:mppImage -// error:&error]; -// XCTAssertNil(error); -// XCTAssertNotNil(faceLandmarkerResult); - -// [self assertFaceLandmarkerResult:faceLandmarkerResult -// containsExpectedLandmarks:expectedLandmarks -// expectedBlendshapes:expectedBlendshapes -// expectedTransformationMatrix:expectedTransformationMatrix]; -// } - -// @end From 15c8a25dc375fec1a94132dc89a7b5c5c9973174 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 6 Sep 2023 18:37:55 +0530 Subject: [PATCH 8/9] Removed unwanted files from image segmenter tests --- .../test/vision/image_segmenter/utils/BUILD | 55 --------- .../MPPFaceLandmarkerResult+HelpersTests.mm | 112 ------------------ 2 files changed, 167 deletions(-) delete mode 100644 mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD delete mode 100644 mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD b/mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD deleted file mode 100644 index 74f2bd11a..000000000 --- a/mediapipe/tasks/ios/test/vision/image_segmenter/utils/BUILD +++ /dev/null @@ -1,55 +0,0 @@ -load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test") -load( - "//mediapipe/framework/tool:ios.bzl", - "MPP_TASK_MINIMUM_OS_VERSION", -) -load( - "@org_tensorflow//tensorflow/lite:special_rules.bzl", - "tflite_ios_lab_runner", -) - -package(default_visibility = ["//mediapipe/tasks:internal"]) - -licenses(["notice"]) - -# Default tags for filtering iOS targets. Targets are restricted to Apple platforms. -TFL_DEFAULT_TAGS = [ - "apple", -] - -# Following sanitizer tests are not supported by iOS test targets. -TFL_DISABLED_SANITIZER_TAGS = [ - "noasan", - "nomsan", - "notsan", -] - -objc_library( - name = "MPPFaceLandmarkeResultHelpersTestLibary", - testonly = 1, - srcs = ["sources/MPPFaceLandmarkerResult+HelpersTests.mm"], - copts = [ - "-ObjC++", - "-std=c++17", - "-x objective-c++", - ], - deps = [ - "//mediapipe/framework:packet", - "//mediapipe/framework/formats:classification_cc_proto", - "//mediapipe/framework/formats:landmark_cc_proto", - "//mediapipe/framework/formats:matrix_data_cc_proto", - "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_cc_proto", - "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerResult", - "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerResultHelpers", - ], -) - -ios_unit_test( - name = "MPPFaceLandmarkeResultHelpersTest", - minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, - runner = tflite_ios_lab_runner("IOS_LATEST"), - tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, - deps = [ - ":MPPFaceLandmarkeResultHelpersTestLibary", - ], -) diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm b/mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm deleted file mode 100644 index 3572aa47e..000000000 --- a/mediapipe/tasks/ios/test/vision/image_segmenter/utils/sources/MPPFaceLandmarkerResult+HelpersTests.mm +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2023 The MediaPipe Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#import - -#include "mediapipe/framework/formats/classification.pb.h" -#include "mediapipe/framework/formats/landmark.pb.h" -#include "mediapipe/framework/formats/matrix_data.pb.h" -#include "mediapipe/framework/packet.h" -#include "mediapipe/tasks/cc/vision/face_geometry/proto/face_geometry.pb.h" -#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h" -#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h" - -using ::mediapipe::MakePacket; -using ::mediapipe::Packet; -using ::mediapipe::Timestamp; -using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList; -using ClassificationListProto = ::mediapipe::ClassificationList; -using FaceGeometryProto = ::mediapipe::tasks::vision::face_geometry::proto::FaceGeometry; - -static constexpr int kMicrosecondsPerMillisecond = 1000; - -@interface MPPLandmarkerResultHelpersTests : XCTestCase { -} -@end - -@implementation MPPLandmarkerResultHelpersTests - -- (void)testCreatesResultFromLandmarkerPackets { - const std::vector normalizedLandmarkProtos({{}}); - const std::vector classificationProtos({{}}); - const std::vector faceGeometryProto({{}}); - - const auto landmarksPacket = - MakePacket>(normalizedLandmarkProtos) - .At(Timestamp(42 * kMicrosecondsPerMillisecond)); - const auto classificationsPacket = - MakePacket>(classificationProtos) - .At(Timestamp(42 * kMicrosecondsPerMillisecond)); - const auto faceGeometryPacket = MakePacket>(faceGeometryProto) - .At(Timestamp(42 * kMicrosecondsPerMillisecond)); - - MPPFaceLandmarkerResult *results = - [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:landmarksPacket - blendshapesPacket:classificationsPacket - transformationMatrixesPacket:faceGeometryPacket]; - - XCTAssertEqual(results.faceLandmarks.count, 1); - XCTAssertEqual(results.faceBlendshapes.count, 1); - XCTAssertEqual(results.facialTransformationMatrixes.count, 1); - XCTAssertEqual(results.timestampInMilliseconds, 42); -} - -- (void)testCreatesCreatesCopyOfFacialTransformationMatrix { - MPPFaceLandmarkerResult *results; - - { - // Create scope so that FaceGeometryProto gets deallocated before we access the - // MPPFaceLandmarkerResult. 
-    FaceGeometryProto faceGeometryProto{};
-    auto *matrixData = faceGeometryProto.mutable_pose_transform_matrix();
-    matrixData->set_cols(4);
-    matrixData->set_rows(4);
-    for (size_t i = 0; i < 4 * 4; ++i) {
-      matrixData->add_packed_data(0.1f * i);
-    }
-
-    const std::vector<FaceGeometryProto> faceGeometryProtos({faceGeometryProto});
-    const auto faceGeometryPacket =
-        MakePacket<std::vector<FaceGeometryProto>>(faceGeometryProtos);
-    results = [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:{}
-                                                              blendshapesPacket:{}
-                                                   transformationMatrixesPacket:faceGeometryPacket];
-  }
-
-  XCTAssertEqual(results.facialTransformationMatrixes.count, 1);
-  XCTAssertEqual(results.facialTransformationMatrixes[0].rows, 4);
-  XCTAssertEqual(results.facialTransformationMatrixes[0].columns, 4);
-  for (size_t column = 0; column < 4; ++column) {
-    for (size_t row = 0; row < 4; ++row) {
-      XCTAssertEqualWithAccuracy(
-          [results.facialTransformationMatrixes[0] valueAtRow:row column:column],
-          0.4f * row + 0.1f * column, /* accuracy= */ 0.0001f, @"at [%zu,%zu]", column, row);
-    }
-  }
-}
-
-- (void)testCreatesResultFromEmptyPackets {
-  const Packet emptyPacket = Packet{}.At(Timestamp(0));
-  MPPFaceLandmarkerResult *results =
-      [MPPFaceLandmarkerResult faceLandmarkerResultWithLandmarksPacket:emptyPacket
-                                                      blendshapesPacket:emptyPacket
-                                           transformationMatrixesPacket:emptyPacket];
-
-  NSArray *emptyArray = [NSArray array];
-  XCTAssertEqualObjects(results.faceLandmarks, emptyArray);
-  XCTAssertEqualObjects(results.faceBlendshapes, emptyArray);
-  XCTAssertEqualObjects(results.facialTransformationMatrixes, emptyArray);
-  XCTAssertEqual(results.timestampInMilliseconds, 0);
-}
-
-@end

From d42bf846aa4f3fca2f63bd51a66c54d8c98da0e3 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 12 Sep 2023 16:41:45 +0530
Subject: [PATCH 9/9] Updated multiple functions in iOS Image Segmenter to use
 vectors

---
 .../vision/image_segmenter/MPPImageSegmenterTests.mm | 31 ++++++++-----------
 1 file changed, 14 insertions(+), 17 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm b/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
index 9df89ad2a..3ec02f365 100644
--- a/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
+++ b/mediapipe/tasks/ios/test/vision/image_segmenter/MPPImageSegmenterTests.mm
@@ -15,6 +15,8 @@
 #import <Foundation/Foundation.h>
 #import <XCTest/XCTest.h>

+#include <vector>
+
 #import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
@@ -44,29 +46,26 @@ double sum(const float *mask, size_t size) {
-float *multiply(const float *mask1, const float *mask2, size_t size) {
-  float *multipliedMask = (float *)malloc(size * sizeof(float));
-  if (!multipliedMask) {
-    exit(-1);
-  }
+std::vector<float> multiply(const float *mask1, const float *mask2, size_t size) {
+  std::vector<float> multipliedMask;
+  multipliedMask.reserve(size);
   for (int i = 0; i < size; i++) {
-    multipliedMask[i] = mask1[i] * mask2[i];
+    multipliedMask.push_back(mask1[i] * mask2[i]);
   }

   return multipliedMask;
 }

 double softIOU(const float *mask1, const float *mask2, size_t size) {
-  float *intersectionVector = multiply(mask1, mask2, size);
-  double intersectionSum = sum(intersectionVector, size);
-  free(intersectionVector);
-
-  float *m1m1Vector = multiply(mask1, mask1, size);
-  double m1m1 = sum(m1m1Vector, size);
-  free(m1m1Vector);
-
-  float *m2m2Vector = multiply(mask2, mask2, size);
-  double m2m2 = sum(m2m2Vector, size);
-  free(m2m2Vector);
+  std::vector<float> intersectionVector = multiply(mask1, mask2, size);
+  double intersectionSum = sum(intersectionVector.data(), size);
+
+  std::vector<float> m1m1Vector = multiply(mask1, mask1, size);
+  double m1m1 = sum(m1m1Vector.data(), size);
+
+  std::vector<float> m2m2Vector = multiply(mask2, mask2, size);
+  double m2m2 = sum(m2m2Vector.data(), size);

   double unionSum = m1m1 + m2m2 - intersectionSum;

   return unionSum > 0.0 ? intersectionSum / unionSum : 0.0;
 }
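As transmitted, this last patch changed only the body of multiply while leaving its `float *` return type and the `free()` calls in softIOU in place, which would not compile; the hunks above are reconstructed so the vector refactor is self-consistent. For comparison, an even more compact formulation is possible (editor's sketch under the same assumptions, not part of the patch): std::inner_product fuses the elementwise multiply and the sum, so the temporary vectors disappear entirely.

// Editor's sketch: softIOU without temporaries, using std::inner_product.
#include <cstddef>
#include <numeric>

static double SoftIOU(const float *mask1, const float *mask2, size_t size) {
  // Accumulate in double (the 0.0 init) to match the precision of the
  // helpers in the test file.
  double intersection = std::inner_product(mask1, mask1 + size, mask2, 0.0);
  double m1m1 = std::inner_product(mask1, mask1 + size, mask1, 0.0);
  double m2m2 = std::inner_product(mask2, mask2 + size, mask2, 0.0);

  double unionSum = m1m1 + m2m2 - intersection;
  return unionSum > 0.0 ? intersection / unionSum : 0.0;
}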