From 746f466c3a55a4947fc1a32bdbbf38d2f0110f1c Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 11:28:50 +0530 Subject: [PATCH 001/106] Added iOS Gesture Recognizer Protobuf utils --- .../vision/gesture_recognizer/utils/BUILD | 21 ++++++ .../MPPGestureRecognizerResult+ProtoHelpers.h | 28 ++++++++ ...MPPGestureRecognizerResult+ProtoHelpers.mm | 66 +++++++++++++++++++ 3 files changed, 115 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h create mode 100644 mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD new file mode 100644 index 000000000..ddac21ed2 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD @@ -0,0 +1,21 @@ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +objc_library( + name = "MPPGestureRecognizerResultProtoHelpers", + srcs = ["sources/MPPGestureRecognizerResult+ProtoHelpers.mm"], + hdrs = ["sources/MPPGestureRecognizerResult+ProtoHelpers.h"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + deps = [ + "//mediapipe/tasks/ios/test/vision/utils:parse_proto_utils", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/tasks/cc/components/containers/proto:landmarks_detection_result_cc_proto", + "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerResultHelpers", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h new file mode 100644 index 000000000..6bb2e5182 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h @@ -0,0 +1,28 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
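+
+// Usage sketch for this test-only category (illustrative; `pbtxtFilePath` is a
+// placeholder for the full path to a bundled `LandmarksDetectionResult` .pbtxt
+// file, and the gesture label is just an example):
+//
+//   MPPGestureRecognizerResult *expectedResult = [MPPGestureRecognizerResult
+//       gestureRecognizerResultsFromTextEncodedProtobufFileWithName:pbtxtFilePath
+//                                                      gestureLabel:@"Thumb_Up"
+//                                             shouldRemoveZPosition:YES];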
+
+#import <Foundation/Foundation.h>
+#import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+@interface MPPGestureRecognizerResult (ProtoHelpers)
+
++ (MPPGestureRecognizerResult *)
+    gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName
+                                                   gestureLabel:(NSString *)gestureLabel
+                                          shouldRemoveZPosition:(BOOL)removeZPosition;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm
new file mode 100644
index 000000000..ce3a262b8
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm
@@ -0,0 +1,66 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h"
+
+#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
+#import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h"
+
+#include "mediapipe/framework/formats/classification.pb.h"
+#include "mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.pb.h"
+#include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"
+
+namespace {
+using ClassificationListProto = ::mediapipe::ClassificationList;
+using ClassificationProto = ::mediapipe::Classification;
+using LandmarksDetectionResultProto =
+    ::mediapipe::tasks::containers::proto::LandmarksDetectionResult;
+using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
+}  // anonymous namespace
+
+@implementation MPPGestureRecognizerResult (ProtoHelpers)
+
++ (MPPGestureRecognizerResult *)
+    gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName
+                                                   gestureLabel:(NSString *)gestureLabel
+                                          shouldRemoveZPosition:(BOOL)removeZPosition {
+  LandmarksDetectionResultProto landmarkDetectionResultProto;
+
+  if (!get_proto_from_pbtxt(fileName.cppString, landmarkDetectionResultProto).ok()) {
+    return nil;
+  }
+
+  if (removeZPosition) {
+    // Remove the z position of landmarks, because it is not used in correctness
+    // testing. For video or live stream mode, the z positions vary a lot during
+    // tracking from frame to frame.
+    for (int i = 0; i < landmarkDetectionResultProto.landmarks().landmark().size(); i++) {
+      auto &landmark = *landmarkDetectionResultProto.mutable_landmarks()->mutable_landmark(i);
+      landmark.clear_z();
+    }
+  }
+
+  ClassificationListProto gesturesProto;
+  ClassificationProto *classificationProto = gesturesProto.add_classification();
+  classificationProto->set_label([gestureLabel UTF8String]);
+
+  return [MPPGestureRecognizerResult
+      gestureRecognizerResultWithHandGesturesProto:{gesturesProto}
+                                   handednessProto:{landmarkDetectionResultProto.classifications()}
+                                handLandmarksProto:{landmarkDetectionResultProto.landmarks()}
+                               worldLandmarksProto:{landmarkDetectionResultProto.world_landmarks()}
+                           timestampInMilliSeconds:0];
+}
+
+@end

From 1e77468eec6fa005b0982b2a4fb758709a7ce41a Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 31 May 2023 11:29:37 +0530
Subject: [PATCH 002/106] Added iOS Gesture Recognizer ObjC Test for simple
 recognition

---
 .../ios/test/vision/gesture_recognizer/BUILD  |  62 ++++
 .../MPPGestureRecognizerTests.m               | 287 ++++++++++++++++++
 2 files changed, 349 insertions(+)
 create mode 100644 mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD
 create mode 100644 mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m

diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD b/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD
new file mode 100644
index 000000000..5be17a26c
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD
@@ -0,0 +1,62 @@
+load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test")
+load(
+    "//mediapipe/framework/tool:ios.bzl",
+    "MPP_TASK_MINIMUM_OS_VERSION",
+)
+load(
+    "@org_tensorflow//tensorflow/lite:special_rules.bzl",
+    "tflite_ios_lab_runner",
+)
+
+package(default_visibility = ["//mediapipe/tasks:internal"])
+
+licenses(["notice"])
+
+# Default tags for filtering iOS targets. Targets are restricted to Apple platforms.
+TFL_DEFAULT_TAGS = [
+    "apple",
+]
+
+# The following sanitizer tests are not supported by iOS test targets.
+TFL_DISABLED_SANITIZER_TAGS = [ + "noasan", + "nomsan", + "notsan", +] + +objc_library( + name = "MPPGestureRecognizerObjcTestLibrary", + testonly = 1, + srcs = ["MPPGestureRecognizerTests.m"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + data = [ + "//mediapipe/tasks/testdata/vision:test_images", + "//mediapipe/tasks/testdata/vision:gesture_recognizer.task", + "//mediapipe/tasks/testdata/vision:test_protos", + ], + deps = [ + "//mediapipe/tasks/ios/common:MPPCommon", + "//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils", + "//mediapipe/tasks/ios/vision/gesture_recognizer:MPPGestureRecognizer", + "//mediapipe/tasks/ios/test/vision/gesture_recognizer/utils:MPPGestureRecognizerResultProtoHelpers", + ] + select({ + "//third_party:opencv_ios_sim_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_x86_64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//conditions:default": ["@ios_opencv//:OpencvFramework"], + }), +) + +ios_unit_test( + name = "MPPGestureRecognizerObjcTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPGestureRecognizerObjcTestLibrary", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m new file mode 100644 index 000000000..1a48322b4 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -0,0 +1,287 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
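+
+// The tests in this file exercise the image-mode recognition flow, whose core
+// shape is sketched below (the model path and input image are placeholders):
+//
+//   MPPGestureRecognizerOptions *options = [[MPPGestureRecognizerOptions alloc] init];
+//   options.baseOptions.modelAssetPath = modelPath;  // e.g. a gesture_recognizer.task bundle
+//   MPPGestureRecognizer *recognizer = [[MPPGestureRecognizer alloc] initWithOptions:options
+//                                                                              error:nil];
+//   MPPGestureRecognizerResult *result = [recognizer recognizeImage:image error:nil];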
+
+#import <XCTest/XCTest.h>
+
+#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
+#import "mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h"
+#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
+#import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h"
+
+static NSDictionary *const kGestureRecognizerBundleAssetFile =
+    @{@"name" : @"gesture_recognizer", @"type" : @"task"};
+
+static NSDictionary *const kTwoHandsImage = @{@"name" : @"right_hands", @"type" : @"jpg"};
+static NSDictionary *const kFistImage = @{@"name" : @"fist", @"type" : @"jpg"};
+static NSDictionary *const kNoHandsImage = @{@"name" : @"cats_and_dogs", @"type" : @"jpg"};
+static NSDictionary *const kThumbUpImage = @{@"name" : @"thumb_up", @"type" : @"jpg"};
+static NSDictionary *const kPointingUpRotatedImage =
+    @{@"name" : @"pointing_up_rotated", @"type" : @"jpg"};
+
+static NSDictionary *const kExpectedFistLandmarksFile =
+    @{@"name" : @"fist_landmarks", @"type" : @"pbtxt"};
+static NSDictionary *const kExpectedThumbUpLandmarksFile =
+    @{@"name" : @"thumb_up_landmarks", @"type" : @"pbtxt"};
+
+static NSString *const kFistLabel = @"Closed_Fist";
+static NSString *const kExpectedThumbUpLabel = @"Thumb_Up";
+static NSString *const kExpectedPointingUpLabel = @"Pointing_Up";
+static NSString *const kRockLabel = @"Rock";
+
+static const NSInteger kGestureExpectedIndex = -1;
+
+static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
+static const float kLandmarksErrorTolerance = 0.03f;
+
+#define AssertEqualErrors(error, expectedError)                                               \
+  XCTAssertNotNil(error);                                                                     \
+  XCTAssertEqualObjects(error.domain, expectedError.domain);                                  \
+  XCTAssertEqual(error.code, expectedError.code);                                             \
+  XCTAssertNotEqual(                                                                          \
+      [error.localizedDescription rangeOfString:expectedError.localizedDescription].location, \
+      NSNotFound)
+
+#define AssertEqualGestures(gesture, expectedGesture, handIndex, gestureIndex)                  \
+  XCTAssertEqual(gesture.index, kGestureExpectedIndex, @"hand index = %d gesture index j = %d", \
+                 handIndex, gestureIndex);                                                      \
+  XCTAssertEqualObjects(gesture.categoryName, expectedGesture.categoryName,                     \
+                        @"hand index = %d gesture index j = %d", handIndex, gestureIndex);
+
+#define AssertApproximatelyEqualLandmarks(landmark, expectedLandmark, handIndex, landmarkIndex)   \
+  XCTAssertEqualWithAccuracy(landmark.x, expectedLandmark.x, kLandmarksErrorTolerance,            \
+                             @"hand index = %d landmark index j = %d", handIndex, landmarkIndex); \
+  XCTAssertEqualWithAccuracy(landmark.y, expectedLandmark.y, kLandmarksErrorTolerance,            \
+                             @"hand index = %d landmark index j = %d", handIndex, landmarkIndex);
+
+#define AssertApproximatelyEqualMultiHandLandmarks(multiHandLandmarks, expectedMultiHandLandmarks) \
+  XCTAssertEqual(multiHandLandmarks.count, expectedMultiHandLandmarks.count)
+
+#define AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult) \
+  XCTAssertTrue(gestureRecognizerResult.gestures.count == 0);         \
+  XCTAssertTrue(gestureRecognizerResult.handedness.count == 0);       \
+  XCTAssertTrue(gestureRecognizerResult.landmarks.count == 0);        \
+  XCTAssertTrue(gestureRecognizerResult.worldLandmarks.count == 0);
+
+@interface MPPGestureRecognizerTests : XCTestCase
+@end
+
+@implementation MPPGestureRecognizerTests
+
+#pragma mark Results
+
++ (MPPGestureRecognizerResult *)emptyGestureRecognizerResult {
+  return [[MPPGestureRecognizerResult alloc] initWithGestures:@[]
+                                                   handedness:@[]
+                                                    landmarks:@[]
+                                               worldLandmarks:@[]
+                                      timestampInMilliseconds:0];
+}
+
++ (MPPGestureRecognizerResult *)thumbUpGestureRecognizerResult {
+  NSString *filePath =
+      [MPPGestureRecognizerTests filePathWithFileInfo:kExpectedThumbUpLandmarksFile];
+
+  return [MPPGestureRecognizerResult
+      gestureRecognizerResultsFromTextEncodedProtobufFileWithName:filePath
+                                                     gestureLabel:kExpectedThumbUpLabel
+                                            shouldRemoveZPosition:YES];
+}
+
++ (MPPGestureRecognizerResult *)fistGestureRecognizerResultWithLabel:(NSString *)gestureLabel {
+  NSString *filePath = [MPPGestureRecognizerTests filePathWithFileInfo:kExpectedFistLandmarksFile];
+
+  return [MPPGestureRecognizerResult
+      gestureRecognizerResultsFromTextEncodedProtobufFileWithName:filePath
+                                                     gestureLabel:gestureLabel
+                                            shouldRemoveZPosition:YES];
+}
+
+- (void)assertMultiHandLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)multiHandLandmarks
+    isApproximatelyEqualToExpectedMultiHandLandmarks:
+        (NSArray<NSArray<MPPNormalizedLandmark *> *> *)expectedMultiHandLandmarks {
+  XCTAssertEqual(multiHandLandmarks.count, expectedMultiHandLandmarks.count);
+  if (multiHandLandmarks.count == 0) {
+    return;
+  }
+
+  NSArray<MPPNormalizedLandmark *> *topHandLandmarks = multiHandLandmarks[0];
+  NSArray<MPPNormalizedLandmark *> *expectedTopHandLandmarks = expectedMultiHandLandmarks[0];
+
+  XCTAssertEqual(topHandLandmarks.count, expectedTopHandLandmarks.count);
+  for (int i = 0; i < expectedTopHandLandmarks.count; i++) {
+    MPPNormalizedLandmark *landmark = topHandLandmarks[i];
+    XCTAssertNotNil(landmark);
+    AssertApproximatelyEqualLandmarks(landmark, expectedTopHandLandmarks[i], 0, i);
+  }
+}
+
+- (void)assertMultiHandWorldLandmarks:
+            (NSArray<NSArray<MPPLandmark *> *> *)multiHandWorldLandmarks
+    isApproximatelyEqualToExpectedMultiHandWorldLandmarks:
+        (NSArray<NSArray<MPPLandmark *> *> *)expectedMultiHandWorldLandmarks {
+  XCTAssertEqual(multiHandWorldLandmarks.count, expectedMultiHandWorldLandmarks.count);
+  if (expectedMultiHandWorldLandmarks.count == 0) {
+    return;
+  }
+
+  NSArray<MPPLandmark *> *topHandWorldLandmarks = multiHandWorldLandmarks[0];
+  NSArray<MPPLandmark *> *expectedTopHandWorldLandmarks = expectedMultiHandWorldLandmarks[0];
+
+  XCTAssertEqual(topHandWorldLandmarks.count, expectedTopHandWorldLandmarks.count);
+  for (int i = 0; i < expectedTopHandWorldLandmarks.count; i++) {
+    MPPLandmark *landmark = topHandWorldLandmarks[i];
+    XCTAssertNotNil(landmark);
+    AssertApproximatelyEqualLandmarks(landmark, expectedTopHandWorldLandmarks[i], 0, i);
+  }
+}
+
+- (void)assertMultiHandGestures:(NSArray<NSArray<MPPCategory *> *> *)multiHandGestures
+    isApproximatelyEqualToExpectedMultiHandGestures:
+        (NSArray<NSArray<MPPCategory *> *> *)expectedMultiHandGestures {
+  XCTAssertEqual(multiHandGestures.count, expectedMultiHandGestures.count);
+  if (multiHandGestures.count == 0) {
+    return;
+  }
+
+  NSArray<MPPCategory *> *topHandGestures = multiHandGestures[0];
+  NSArray<MPPCategory *> *expectedTopHandGestures = expectedMultiHandGestures[0];
+
+  XCTAssertEqual(topHandGestures.count, expectedTopHandGestures.count);
+  for (int i = 0; i < expectedTopHandGestures.count; i++) {
+    MPPCategory *gesture = topHandGestures[i];
+    XCTAssertNotNil(gesture);
+    AssertEqualGestures(gesture, expectedTopHandGestures[i], 0, i);
+  }
+}
+
+- (void)assertGestureRecognizerResult:(MPPGestureRecognizerResult *)gestureRecognizerResult
+    isApproximatelyEqualToExpectedResult:
+        (MPPGestureRecognizerResult *)expectedGestureRecognizerResult {
+  [self
assertMultiHandLandmarks:gestureRecognizerResult.landmarks + isApproximatelyEqualToExpectedMultiHandLandmarks:expectedGestureRecognizerResult.landmarks]; + [self assertMultiHandWorldLandmarks:gestureRecognizerResult.worldLandmarks + isApproximatelyEqualToExpectedMultiHandWorldLandmarks:expectedGestureRecognizerResult + .worldLandmarks]; + [self assertMultiHandGestures:gestureRecognizerResult.gestures + isApproximatelyEqualToExpectedMultiHandGestures:expectedGestureRecognizerResult.gestures]; +} + +#pragma mark File + ++ (NSString *)filePathWithFileInfo:(NSDictionary *)fileInfo { + NSString *filePath = [MPPGestureRecognizerTests filePathWithName:fileInfo[@"name"] + extension:fileInfo[@"type"]]; + return filePath; +} + ++ (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension { + NSString *filePath = [[NSBundle bundleForClass:self.class] pathForResource:fileName + ofType:extension]; + return filePath; +} + +#pragma mark Gesture Recognizer Initializers + +- (MPPGestureRecognizerOptions *)gestureRecognizerOptionsWithModelFileInfo: + (NSDictionary *)modelFileInfo { + NSString *modelPath = [MPPGestureRecognizerTests filePathWithFileInfo:modelFileInfo]; + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [[MPPGestureRecognizerOptions alloc] init]; + gestureRecognizerOptions.baseOptions.modelAssetPath = modelPath; + + return gestureRecognizerOptions; +} + +- (MPPGestureRecognizer *)createGestureRecognizerWithOptionsSucceeds: + (MPPGestureRecognizerOptions *)gestureRecognizerOptions { + MPPGestureRecognizer *gestureRecognizer = + [[MPPGestureRecognizer alloc] initWithOptions:gestureRecognizerOptions error:nil]; + XCTAssertNotNil(gestureRecognizer); + + return gestureRecognizer; +} + +- (void)assertCreateGestureRecognizerWithOptions: + (MPPGestureRecognizerOptions *)gestureRecognizerOptions + failsWithExpectedError:(NSError *)expectedError { + NSError *error = nil; + MPPGestureRecognizer *gestureRecognizer = + [[MPPGestureRecognizer alloc] initWithOptions:gestureRecognizerOptions error:&error]; + + XCTAssertNil(gestureRecognizer); + AssertEqualErrors(error, expectedError); +} + +#pragma mark Assert Gesture Recognizer Results + +- (MPPImage *)imageWithFileInfo:(NSDictionary *)fileInfo { + MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPGestureRecognizerTests class] + fileName:fileInfo[@"name"] + ofType:fileInfo[@"type"]]; + XCTAssertNotNil(image); + + return image; +} + +- (MPPImage *)imageWithFileInfo:(NSDictionary *)fileInfo + orientation:(UIImageOrientation)orientation { + MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPGestureRecognizerTests class] + fileName:fileInfo[@"name"] + ofType:fileInfo[@"type"] + orientation:orientation]; + XCTAssertNotNil(image); + + return image; +} + +- (MPPGestureRecognizerResult *)recognizeImageWithFileInfo:(NSDictionary *)imageFileInfo + usingGestureRecognizer: + (MPPGestureRecognizer *)gestureRecognizer { + MPPImage *mppImage = [self imageWithFileInfo:imageFileInfo]; + MPPGestureRecognizerResult *gestureRecognizerResult = [gestureRecognizer recognizeImage:mppImage + error:nil]; + XCTAssertNotNil(gestureRecognizerResult); + + return gestureRecognizerResult; +} + +- (void)assertResultsOfRecognizeImageWithFileInfo:(NSDictionary *)fileInfo + usingGestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + (MPPGestureRecognizerResult *)expectedGestureRecognizerResult { + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:fileInfo 
usingGestureRecognizer:gestureRecognizer]; + [self assertGestureRecognizerResult:gestureRecognizerResult + isApproximatelyEqualToExpectedResult:expectedGestureRecognizerResult]; +} + +#pragma mark General Tests + +- (void)testRecognizeWithModelPathSucceeds { + NSString *modelPath = + [MPPGestureRecognizerTests filePathWithFileInfo:kGestureRecognizerBundleAssetFile]; + MPPGestureRecognizer *gestureRecognizer = + [[MPPGestureRecognizer alloc] initWithModelPath:modelPath error:nil]; + XCTAssertNotNil(gestureRecognizer); + + [self assertResultsOfRecognizeImageWithFileInfo:kThumbUpImage + usingGestureRecognizer:gestureRecognizer + approximatelyEqualsGestureRecognizerResult:[MPPGestureRecognizerTests + thumbUpGestureRecognizerResult]]; +} + +@end From 84560f3e7db5026fb762f570b1ee13cd5c159442 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 11:36:29 +0530 Subject: [PATCH 003/106] Added more recognize tests to iOS Gesture Recognizer Objective C tests --- .../MPPGestureRecognizerTests.m | 124 ++++++++++++++++++ 1 file changed, 124 insertions(+) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 1a48322b4..94600a83f 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -284,4 +284,128 @@ static const float kLandmarksErrorTolerance = 0.03f; thumbUpGestureRecognizerResult]]; } +- (void)testRecognizeWithEmptyResultsSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kNoHandsImage usingGestureRecognizer:gestureRecognizer]; + AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult); +} + +- (void)testRecognizeWithScoreThresholdSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kThumbUpImage usingGestureRecognizer:gestureRecognizer]; + + MPPGestureRecognizerResult *expectedGestureRecognizerResult = + [MPPGestureRecognizerTests thumbUpGestureRecognizerResult]; + + XCTAssertTrue(gestureRecognizerResult.gestures.count == 1); + AssertEqualGestures(gestureRecognizerResult.gestures[0][0], + expectedGestureRecognizerResult.gestures[0][0], 0, 0); +} + +- (void)testRecognizeWithNumHandsSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + const NSInteger numberOfHands = 2; + gestureRecognizerOptions.numberOfHands = numberOfHands; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + MPPGestureRecognizerResult *gestureRecognizerResult = + [self 
recognizeImageWithFileInfo:kTwoHandsImage usingGestureRecognizer:gestureRecognizer]; + + XCTAssertTrue(gestureRecognizerResult.handedness.count == numberOfHands); +} + +- (void)testRecognizeWithRotationSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + const NSInteger numberOfHands = 2; + gestureRecognizerOptions.numberOfHands = numberOfHands; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage + orientation:UIImageOrientationRight]; + + MPPGestureRecognizerResult *gestureRecognizerResult = [gestureRecognizer recognizeImage:mppImage + error:nil]; + + XCTAssertNotNil(gestureRecognizerResult); + + const NSInteger expectedGesturesCount = 1; + + XCTAssertEqual(gestureRecognizerResult.gestures.count, expectedGesturesCount); + XCTAssertEqualObjects(gestureRecognizerResult.gestures[0][0].categoryName, + kExpectedPointingUpLabel); +} + +- (void)testRecognizeWithCannedGestureFistSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + const NSInteger numberOfHands = 1; + gestureRecognizerOptions.numberOfHands = numberOfHands; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + [self assertResultsOfRecognizeImageWithFileInfo:kFistImage + usingGestureRecognizer:gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; +} + +- (void)testRecognizeWithAllowGestureFistSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ]; + + const NSInteger numberOfHands = 1; + gestureRecognizerOptions.numberOfHands = numberOfHands; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + [self assertResultsOfRecognizeImageWithFileInfo:kFistImage + usingGestureRecognizer:gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; +} + +- (void)testRecognizeWithDenyGestureFistSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ]; + + const NSInteger numberOfHands = 1; + gestureRecognizerOptions.numberOfHands = numberOfHands; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kFistImage usingGestureRecognizer:gestureRecognizer]; + 
AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult);
+}
+
 @end

From f3f664300cac55b34ab478b26b9780c0ccd30bcc Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 31 May 2023 11:50:39 +0530
Subject: [PATCH 004/106] Added convenience method for creating results for
 tests in MPPGestureRecognizerResult Helpers

---
 .../MPPGestureRecognizerResult+Helpers.h      |  24 +++
 .../MPPGestureRecognizerResult+Helpers.mm     | 162 ++++++++++--------
 2 files changed, 110 insertions(+), 76 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h
index 6b0f8bf81..5e75febf3 100644
--- a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h
+++ b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h
@@ -43,6 +43,30 @@ static const int kMicroSecondsPerMilliSecond = 1000;
                      handLandmarksPacket:(const mediapipe::Packet &)handLandmarksPacket
                     worldLandmarksPacket:(const mediapipe::Packet &)worldLandmarksPacket;
 
+/**
+ * Creates an `MPPGestureRecognizerResult` from hand gestures, handedness, hand landmarks and
+ * world landmarks proto vectors.
+ *
+ * @param handGesturesProto A vector of `mediapipe::ClassificationList` protos.
+ * @param handednessProto A vector of `mediapipe::ClassificationList` protos.
+ * @param handLandmarksProto A vector of `mediapipe::NormalizedLandmarkList` protos.
+ * @param worldLandmarksProto A vector of `mediapipe::LandmarkList` protos.
+ *
+ * @return An `MPPGestureRecognizerResult` object that contains the hand gesture recognition
+ * results.
+ */
++ (MPPGestureRecognizerResult *)
+    gestureRecognizerResultWithHandGesturesProto:
+        (const std::vector<mediapipe::ClassificationList> &)handGesturesProto
+                                 handednessProto:
+                                     (const std::vector<mediapipe::ClassificationList> &)
+                                         handednessProto
+                              handLandmarksProto:
+                                  (const std::vector<mediapipe::NormalizedLandmarkList> &)
+                                      handLandmarksProto
+                             worldLandmarksProto:
+                                 (const std::vector<mediapipe::LandmarkList> &)worldLandmarksProto
+                         timestampInMilliSeconds:(NSInteger)timestampInMilliseconds;
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm
index 8eed2a923..5162dd891 100644
--- a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm
+++ b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm
@@ -17,12 +17,6 @@
 #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPCategory+Helpers.h"
 #import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
 
-#include "mediapipe/framework/formats/classification.pb.h"
-#include "mediapipe/framework/formats/landmark.pb.h"
-#include "mediapipe/framework/packet.h"
-
-static const NSInteger kDefaultGestureIndex = -1;
-
 namespace {
 using ClassificationListProto = ::mediapipe::ClassificationList;
 using LandmarkListProto = ::mediapipe::LandmarkList;
@@ -30,6 +24,8 @@ using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
 using ::mediapipe::Packet;
 }  // namespace
 
+static const NSInteger kDefaultGestureIndex = -1;
+
 @implementation MPPGestureRecognizerResult (Helpers)
 
 + (MPPGestureRecognizerResult *)emptyGestureRecognizerResultWithTimestampInMilliseconds:
@@ -41,6 +37,80 @@ using ::mediapipe::Packet;
                                  timestampInMilliseconds:timestampInMilliseconds];
 }
 
++ (MPPGestureRecognizerResult *)
+    gestureRecognizerResultWithHandGesturesProto:
+        (const std::vector<ClassificationListProto> &)handGesturesProto
+                                 handednessProto:
+                                     (const std::vector<ClassificationListProto> &)handednessProto
+                              handLandmarksProto:
+                                  (const std::vector<NormalizedLandmarkListProto> &)
+                                      handLandmarksProto
+                             worldLandmarksProto:
+                                 (const std::vector<LandmarkListProto> &)worldLandmarksProto
+                         timestampInMilliSeconds:(NSInteger)timestampInMilliseconds {
+  NSMutableArray<NSMutableArray<MPPCategory *> *> *multiHandGestures =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)handGesturesProto.size()];
+
+  for (const auto &classificationListProto : handGesturesProto) {
+    NSMutableArray<MPPCategory *> *gestures = [NSMutableArray
+        arrayWithCapacity:(NSUInteger)classificationListProto.classification().size()];
+    for (const auto &classificationProto : classificationListProto.classification()) {
+      MPPCategory *category = [MPPCategory categoryWithProto:classificationProto
+                                                       index:kDefaultGestureIndex];
+      [gestures addObject:category];
+    }
+    [multiHandGestures addObject:gestures];
+  }
+
+  NSMutableArray<NSMutableArray<MPPCategory *> *> *multiHandHandedness =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)handednessProto.size()];
+
+  for (const auto &classificationListProto : handednessProto) {
+    NSMutableArray<MPPCategory *> *handedness = [NSMutableArray
+        arrayWithCapacity:(NSUInteger)classificationListProto.classification().size()];
+    for (const auto &classificationProto : classificationListProto.classification()) {
+      MPPCategory *category = [MPPCategory categoryWithProto:classificationProto];
+      [handedness addObject:category];
+    }
+    [multiHandHandedness addObject:handedness];
+  }
+
+  NSMutableArray<NSMutableArray<MPPNormalizedLandmark *> *> *multiHandLandmarks =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)handLandmarksProto.size()];
+
+  for (const auto &handLandmarkListProto : handLandmarksProto) {
+    NSMutableArray<MPPNormalizedLandmark *> *handLandmarks =
+        [NSMutableArray arrayWithCapacity:(NSUInteger)handLandmarkListProto.landmark().size()];
+    for (const auto &normalizedLandmarkProto : handLandmarkListProto.landmark()) {
+      MPPNormalizedLandmark *normalizedLandmark =
+          [MPPNormalizedLandmark normalizedLandmarkWithProto:normalizedLandmarkProto];
+      [handLandmarks addObject:normalizedLandmark];
+    }
+    [multiHandLandmarks addObject:handLandmarks];
+  }
+
+  NSMutableArray<NSMutableArray<MPPLandmark *> *> *multiHandWorldLandmarks =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarksProto.size()];
+
+  for (const auto &worldLandmarkListProto : worldLandmarksProto) {
+    NSMutableArray<MPPLandmark *> *worldLandmarks =
+        [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarkListProto.landmark().size()];
+    for (const auto &landmarkProto : worldLandmarkListProto.landmark()) {
+      MPPLandmark *landmark = [MPPLandmark landmarkWithProto:landmarkProto];
+      [worldLandmarks addObject:landmark];
+    }
+    [multiHandWorldLandmarks addObject:worldLandmarks];
+  }
+
+  MPPGestureRecognizerResult *gestureRecognizerResult =
+      [[MPPGestureRecognizerResult alloc] initWithGestures:multiHandGestures
+                                                handedness:multiHandHandedness
+                                                 landmarks:multiHandLandmarks
+                                            worldLandmarks:multiHandWorldLandmarks
+                                   timestampInMilliseconds:timestampInMilliseconds];
+
+  return gestureRecognizerResult;
+}
+
 + (MPPGestureRecognizerResult *)
     gestureRecognizerResultWithHandGesturesPacket:(const Packet &)handGesturesPacket
                                  handednessPacket:(const Packet &)handednessPacket
@@ -62,76 +132,16 @@ using ::mediapipe::Packet;
         emptyGestureRecognizerResultWithTimestampInMilliseconds:timestampInMilliseconds];
   }
 
-  const std::vector<ClassificationListProto> &handGesturesClassificationListProtos =
-      handGesturesPacket.Get<std::vector<ClassificationListProto>>();
-  NSMutableArray<NSMutableArray<MPPCategory *> *> *multiHandGestures =
-      [NSMutableArray arrayWithCapacity:(NSUInteger)handGesturesClassificationListProtos.size()];
-
-  for (const auto
&classificationListProto : handGesturesClassificationListProtos) {
-    NSMutableArray<MPPCategory *> *gestures = [NSMutableArray
-        arrayWithCapacity:(NSUInteger)classificationListProto.classification().size()];
-    for (const auto &classificationProto : classificationListProto.classification()) {
-      MPPCategory *category = [MPPCategory categoryWithProto:classificationProto
-                                                       index:kDefaultGestureIndex];
-      [gestures addObject:category];
-    }
-    [multiHandGestures addObject:gestures];
-  }
-
-  const std::vector<ClassificationListProto> &handednessClassificationListProtos =
-      handednessPacket.Get<std::vector<ClassificationListProto>>();
-  NSMutableArray<NSMutableArray<MPPCategory *> *> *multiHandHandedness =
-      [NSMutableArray arrayWithCapacity:(NSUInteger)handednessClassificationListProtos.size()];
-
-  for (const auto &classificationListProto : handednessClassificationListProtos) {
-    NSMutableArray<MPPCategory *> *handedness = [NSMutableArray
-        arrayWithCapacity:(NSUInteger)classificationListProto.classification().size()];
-    for (const auto &classificationProto : classificationListProto.classification()) {
-      MPPCategory *category = [MPPCategory categoryWithProto:classificationProto];
-      [handedness addObject:category];
-    }
-    [multiHandHandedness addObject:handedness];
-  }
-
-  const std::vector<NormalizedLandmarkListProto> &handLandmarkListProtos =
-      handLandmarksPacket.Get<std::vector<NormalizedLandmarkListProto>>();
-  NSMutableArray<NSMutableArray<MPPNormalizedLandmark *> *> *multiHandLandmarks =
-      [NSMutableArray arrayWithCapacity:(NSUInteger)handLandmarkListProtos.size()];
-
-  for (const auto &handLandmarkListProto : handLandmarkListProtos) {
-    NSMutableArray<MPPNormalizedLandmark *> *handLandmarks =
-        [NSMutableArray arrayWithCapacity:(NSUInteger)handLandmarkListProto.landmark().size()];
-    for (const auto &normalizedLandmarkProto : handLandmarkListProto.landmark()) {
-      MPPNormalizedLandmark *normalizedLandmark =
-          [MPPNormalizedLandmark normalizedLandmarkWithProto:normalizedLandmarkProto];
-      [handLandmarks addObject:normalizedLandmark];
-    }
-    [multiHandLandmarks addObject:handLandmarks];
-  }
-
-  const std::vector<LandmarkListProto> &worldLandmarkListProtos =
-      worldLandmarksPacket.Get<std::vector<LandmarkListProto>>();
-  NSMutableArray<NSMutableArray<MPPLandmark *> *> *multiHandWorldLandmarks =
-      [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarkListProtos.size()];
-
-  for (const auto &worldLandmarkListProto : worldLandmarkListProtos) {
-    NSMutableArray<MPPLandmark *> *worldLandmarks =
-        [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarkListProto.landmark().size()];
-    for (const auto &landmarkProto : worldLandmarkListProto.landmark()) {
-      MPPLandmark *landmark = [MPPLandmark landmarkWithProto:landmarkProto];
-      [worldLandmarks addObject:landmark];
-    }
-    [multiHandWorldLandmarks addObject:worldLandmarks];
-  }
-
-  MPPGestureRecognizerResult *gestureRecognizerResult =
-      [[MPPGestureRecognizerResult alloc] initWithGestures:multiHandGestures
-                                                handedness:multiHandHandedness
-                                                 landmarks:multiHandLandmarks
-                                            worldLandmarks:multiHandWorldLandmarks
-                                   timestampInMilliseconds:timestampInMilliseconds];
-
-  return gestureRecognizerResult;
+  return [MPPGestureRecognizerResult
+      gestureRecognizerResultWithHandGesturesProto:handGesturesPacket
+                                                       .Get<std::vector<ClassificationListProto>>()
+                                   handednessProto:handednessPacket
+                                                       .Get<std::vector<ClassificationListProto>>()
+                                handLandmarksProto:handLandmarksPacket
+                                                       .Get<std::vector<
+                                                           NormalizedLandmarkListProto>>()
+                               worldLandmarksProto:worldLandmarksPacket
+                                                       .Get<std::vector<LandmarkListProto>>()
+                           timestampInMilliSeconds:timestampInMilliseconds];
 }
 
 @end

From 9546596b5af9151152d47e158db2e3e59c26d695 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 31 May 2023 11:51:00 +0530
Subject: [PATCH 005/106] Updated variable name in MPPGestureRecognizerTests.m

---
 .../MPPGestureRecognizerTests.m               | 22 +++++++++----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git
a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 94600a83f..c1504da00 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -320,8 +320,8 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPGestureRecognizerOptions *gestureRecognizerOptions = [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - const NSInteger numberOfHands = 2; - gestureRecognizerOptions.numberOfHands = numberOfHands; + const NSInteger numHands = 2; + gestureRecognizerOptions.numHands = numHands; MPPGestureRecognizer *gestureRecognizer = [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; @@ -329,15 +329,15 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPGestureRecognizerResult *gestureRecognizerResult = [self recognizeImageWithFileInfo:kTwoHandsImage usingGestureRecognizer:gestureRecognizer]; - XCTAssertTrue(gestureRecognizerResult.handedness.count == numberOfHands); + XCTAssertTrue(gestureRecognizerResult.handedness.count == numHands); } - (void)testRecognizeWithRotationSucceeds { MPPGestureRecognizerOptions *gestureRecognizerOptions = [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - const NSInteger numberOfHands = 2; - gestureRecognizerOptions.numberOfHands = numberOfHands; + const NSInteger numHands = 2; + gestureRecognizerOptions.numHands = numHands; MPPGestureRecognizer *gestureRecognizer = [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; @@ -360,8 +360,8 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPGestureRecognizerOptions *gestureRecognizerOptions = [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - const NSInteger numberOfHands = 1; - gestureRecognizerOptions.numberOfHands = numberOfHands; + const NSInteger numHands = 1; + gestureRecognizerOptions.numHands = numHands; MPPGestureRecognizer *gestureRecognizer = [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; @@ -379,8 +379,8 @@ static const float kLandmarksErrorTolerance = 0.03f; gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ]; - const NSInteger numberOfHands = 1; - gestureRecognizerOptions.numberOfHands = numberOfHands; + const NSInteger numHands = 1; + gestureRecognizerOptions.numHands = numHands; MPPGestureRecognizer *gestureRecognizer = [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; @@ -398,8 +398,8 @@ static const float kLandmarksErrorTolerance = 0.03f; gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ]; - const NSInteger numberOfHands = 1; - gestureRecognizerOptions.numberOfHands = numberOfHands; + const NSInteger numHands = 1; + gestureRecognizerOptions.numHands = numHands; MPPGestureRecognizer *gestureRecognizer = [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; From 365956807db10c434ea2e16b23ae43055a4a3c89 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 11:52:29 +0530 Subject: [PATCH 006/106] Added gesture_recognizer.task to vision tasks test data --- 
mediapipe/tasks/testdata/vision/BUILD | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD index 632e8aa4e..f6153dd12 100644 --- a/mediapipe/tasks/testdata/vision/BUILD +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -54,6 +54,7 @@ mediapipe_files(srcs = [ "hand_landmark_full.tflite", "hand_landmark_lite.tflite", "hand_landmarker.task", + "gesture_recognizer.task", "left_hands.jpg", "left_hands_rotated.jpg", "mobilenet_v1_0.25_192_quantized_1_default_1.tflite", @@ -104,7 +105,6 @@ exports_files( "expected_right_down_hand_landmarks.prototxt", "expected_right_up_hand_landmarks.prototxt", "face_geometry_expected_out.pbtxt", - "gesture_recognizer.task", "portrait_expected_detection.pbtxt", "portrait_expected_face_geometry.pbtxt", "portrait_rotated_expected_detection.pbtxt", From c87e21206a630d43d8ed0961d22d2d7fb58ed1de Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 11:53:41 +0530 Subject: [PATCH 007/106] Removed few test from MPPGestureRecognizerTests.m --- .../MPPGestureRecognizerTests.m | 112 ------------------ 1 file changed, 112 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index c1504da00..3b9d6367f 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -296,116 +296,4 @@ static const float kLandmarksErrorTolerance = 0.03f; AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult); } -- (void)testRecognizeWithScoreThresholdSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; - gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; - - MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - - MPPGestureRecognizerResult *gestureRecognizerResult = - [self recognizeImageWithFileInfo:kThumbUpImage usingGestureRecognizer:gestureRecognizer]; - - MPPGestureRecognizerResult *expectedGestureRecognizerResult = - [MPPGestureRecognizerTests thumbUpGestureRecognizerResult]; - - XCTAssertTrue(gestureRecognizerResult.gestures.count == 1); - AssertEqualGestures(gestureRecognizerResult.gestures[0][0], - expectedGestureRecognizerResult.gestures[0][0], 0, 0); -} - -- (void)testRecognizeWithNumHandsSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - - const NSInteger numHands = 2; - gestureRecognizerOptions.numHands = numHands; - - MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - - MPPGestureRecognizerResult *gestureRecognizerResult = - [self recognizeImageWithFileInfo:kTwoHandsImage usingGestureRecognizer:gestureRecognizer]; - - XCTAssertTrue(gestureRecognizerResult.handedness.count == numHands); -} - -- (void)testRecognizeWithRotationSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - - const NSInteger numHands = 2; - gestureRecognizerOptions.numHands = numHands; - - 
MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage - orientation:UIImageOrientationRight]; - - MPPGestureRecognizerResult *gestureRecognizerResult = [gestureRecognizer recognizeImage:mppImage - error:nil]; - - XCTAssertNotNil(gestureRecognizerResult); - - const NSInteger expectedGesturesCount = 1; - - XCTAssertEqual(gestureRecognizerResult.gestures.count, expectedGesturesCount); - XCTAssertEqualObjects(gestureRecognizerResult.gestures[0][0].categoryName, - kExpectedPointingUpLabel); -} - -- (void)testRecognizeWithCannedGestureFistSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - - const NSInteger numHands = 1; - gestureRecognizerOptions.numHands = numHands; - - MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - - [self assertResultsOfRecognizeImageWithFileInfo:kFistImage - usingGestureRecognizer:gestureRecognizer - approximatelyEqualsGestureRecognizerResult: - [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; -} - -- (void)testRecognizeWithAllowGestureFistSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; - gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; - gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ]; - - const NSInteger numHands = 1; - gestureRecognizerOptions.numHands = numHands; - - MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - - [self assertResultsOfRecognizeImageWithFileInfo:kFistImage - usingGestureRecognizer:gestureRecognizer - approximatelyEqualsGestureRecognizerResult: - [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; -} - -- (void)testRecognizeWithDenyGestureFistSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; - gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; - gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ]; - - const NSInteger numHands = 1; - gestureRecognizerOptions.numHands = numHands; - - MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - MPPGestureRecognizerResult *gestureRecognizerResult = - [self recognizeImageWithFileInfo:kFistImage usingGestureRecognizer:gestureRecognizer]; - AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult); -} - @end From f77e685ff97bbc5e8a8950e054d83fce7d13a706 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 20:34:02 +0530 Subject: [PATCH 008/106] Removed unwanted header import from MPPGestureRecognizer.h --- mediapipe/tasks/ios/vision/gesture_recognizer/BUILD | 1 - .../ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h | 1 - 2 files changed, 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/BUILD 
b/mediapipe/tasks/ios/vision/gesture_recognizer/BUILD index 78a07e17d..d9a76afde 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/BUILD +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/BUILD @@ -56,7 +56,6 @@ objc_library( "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", "//mediapipe/tasks/ios/common/utils:NSStringHelpers", "//mediapipe/tasks/ios/core:MPPTaskInfo", - "//mediapipe/tasks/ios/core:MPPTaskOptions", "//mediapipe/tasks/ios/vision/core:MPPImage", "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator", "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner", diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h index ed8ff30f9..65136dc83 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h @@ -14,7 +14,6 @@ #import -#import "mediapipe/tasks/ios/core/sources/MPPTaskOptions.h" #import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h" #import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerOptions.h" #import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h" From 955489d71dce7a18795aa0f1dcbc421744fbd225 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 20:39:01 +0530 Subject: [PATCH 009/106] Removed a test from iOS ObjC Gesture Recognizer tests --- .../gesture_recognizer/MPPGestureRecognizerTests.m | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 3b9d6367f..1a48322b4 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -284,16 +284,4 @@ static const float kLandmarksErrorTolerance = 0.03f; thumbUpGestureRecognizerResult]]; } -- (void)testRecognizeWithEmptyResultsSucceeds { - MPPGestureRecognizerOptions *gestureRecognizerOptions = - [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; - - MPPGestureRecognizer *gestureRecognizer = - [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; - - MPPGestureRecognizerResult *gestureRecognizerResult = - [self recognizeImageWithFileInfo:kNoHandsImage usingGestureRecognizer:gestureRecognizer]; - AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult); -} - @end From 71f2f8f43b53f56c7ffea763bca18c8e2aaab7d2 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 20:42:49 +0530 Subject: [PATCH 010/106] Added MPPHandLandmarkerResult Helpers --- .../ios/vision/hand_landmarker/utils/BUILD | 14 ++ .../sources/MPPHandLandmarkerResult+Helpers.h | 64 +++++++++ .../MPPHandLandmarkerResult+Helpers.mm | 122 ++++++++++++++++++ 3 files changed, 200 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h create mode 100644 mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.mm diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/utils/BUILD b/mediapipe/tasks/ios/vision/hand_landmarker/utils/BUILD index dfb85a218..ed22d171f 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/utils/BUILD +++ 
b/mediapipe/tasks/ios/vision/hand_landmarker/utils/BUILD
@@ -31,3 +31,17 @@ objc_library(
         "//mediapipe/tasks/ios/vision/hand_landmarker:MPPHandLandmarkerOptions",
     ],
 )
+
+objc_library(
+    name = "MPPHandLandmarkerResultHelpers",
+    srcs = ["sources/MPPHandLandmarkerResult+Helpers.mm"],
+    hdrs = ["sources/MPPHandLandmarkerResult+Helpers.h"],
+    deps = [
+        "//mediapipe/framework:packet",
+        "//mediapipe/framework/formats:classification_cc_proto",
+        "//mediapipe/framework/formats:landmark_cc_proto",
+        "//mediapipe/tasks/ios/components/containers/utils:MPPCategoryHelpers",
+        "//mediapipe/tasks/ios/components/containers/utils:MPPLandmarkHelpers",
+        "//mediapipe/tasks/ios/vision/hand_landmarker:MPPHandLandmarkerResult",
+    ],
+)
diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h
new file mode 100644
index 000000000..fa7bd42ce
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h
@@ -0,0 +1,64 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h"
+
+#include "mediapipe/framework/formats/classification.pb.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/packet.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+static const int kMicroSecondsPerMilliSecond = 1000;
+
+@interface MPPHandLandmarkerResult (Helpers)
+
+/**
+ * Creates an `MPPHandLandmarkerResult` from landmarks, world landmarks and handedness packets.
+ *
+ * @param handLandmarksPacket A MediaPipe packet wrapping a
+ * `std::vector<mediapipe::NormalizedLandmarkList>`.
+ * @param worldLandmarksPacket A MediaPipe packet wrapping a
+ * `std::vector<mediapipe::LandmarkList>`.
+ * @param handednessPacket A MediaPipe packet wrapping a
+ * `std::vector<mediapipe::ClassificationList>`.
+ *
+ * @return An `MPPHandLandmarkerResult` object that contains the hand landmark detection
+ * results.
+ */
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultWithLandmarksPacket:(const mediapipe::Packet &)handLandmarksPacket
+                       worldLandmarksPacket:(const mediapipe::Packet &)worldLandmarksPacket
+                           handednessPacket:(const mediapipe::Packet &)handednessPacket;
+
+/**
+ * Creates an `MPPHandLandmarkerResult` from handedness, landmarks and world landmarks proto
+ * vectors.
+ *
+ * @param landmarksProto A vector of `mediapipe::NormalizedLandmarkList` protos.
+ * @param worldLandmarksProto A vector of `mediapipe::LandmarkList` protos.
+ * @param handednessProto A vector of `mediapipe::ClassificationList` protos.
+ * @param timestampInMilliseconds The timestamp of the result, in milliseconds.
+ *
+ * @return An `MPPHandLandmarkerResult` object that contains the hand landmark detection
+ * results.
+ */
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultWithLandmarksProto:
+        (const std::vector<mediapipe::NormalizedLandmarkList> &)landmarksProto
+                       worldLandmarksProto:
+                           (const std::vector<mediapipe::LandmarkList> &)worldLandmarksProto
+                           handednessProto:
+                               (const std::vector<mediapipe::ClassificationList> &)handednessProto
+                   timestampInMilliSeconds:(NSInteger)timestampInMilliseconds;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.mm b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.mm
new file mode 100644
index 000000000..2936853ab
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.mm
@@ -0,0 +1,122 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h"
+
+#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPCategory+Helpers.h"
+#import "mediapipe/tasks/ios/components/containers/utils/sources/MPPLandmark+Helpers.h"
+
+namespace {
+using ClassificationListProto = ::mediapipe::ClassificationList;
+using LandmarkListProto = ::mediapipe::LandmarkList;
+using NormalizedLandmarkListProto = ::mediapipe::NormalizedLandmarkList;
+using ::mediapipe::Packet;
+}  // namespace
+
+@implementation MPPHandLandmarkerResult (Helpers)
+
++ (MPPHandLandmarkerResult *)emptyHandLandmarkerResultWithTimestampInMilliseconds:
+    (NSInteger)timestampInMilliseconds {
+  return [[MPPHandLandmarkerResult alloc] initWithLandmarks:@[]
+                                              worldLandmarks:@[]
+                                                  handedness:@[]
+                                     timestampInMilliseconds:timestampInMilliseconds];
+}
+
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultWithLandmarksProto:
+        (const std::vector<NormalizedLandmarkListProto> &)landmarksProto
+                       worldLandmarksProto:
+                           (const std::vector<LandmarkListProto> &)worldLandmarksProto
+                           handednessProto:
+                               (const std::vector<ClassificationListProto> &)handednessProto
+                   timestampInMilliSeconds:(NSInteger)timestampInMilliseconds {
+  NSMutableArray<NSMutableArray<MPPNormalizedLandmark *> *> *multiHandLandmarks =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)landmarksProto.size()];
+
+  for (const auto &landmarkListProto : landmarksProto) {
+    NSMutableArray<MPPNormalizedLandmark *> *landmarks =
+        [NSMutableArray arrayWithCapacity:(NSUInteger)landmarkListProto.landmark().size()];
+    for (const auto &normalizedLandmarkProto : landmarkListProto.landmark()) {
+      MPPNormalizedLandmark *normalizedLandmark =
+          [MPPNormalizedLandmark normalizedLandmarkWithProto:normalizedLandmarkProto];
+      [landmarks addObject:normalizedLandmark];
+    }
+    [multiHandLandmarks addObject:landmarks];
+  }
+
+  NSMutableArray<NSMutableArray<MPPLandmark *> *> *multiHandWorldLandmarks =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarksProto.size()];
+
+  for (const auto &worldLandmarkListProto : worldLandmarksProto) {
+    NSMutableArray<MPPLandmark *> *worldLandmarks =
+        [NSMutableArray arrayWithCapacity:(NSUInteger)worldLandmarkListProto.landmark().size()];
+    for (const auto &landmarkProto : worldLandmarkListProto.landmark()) {
+      MPPLandmark *landmark = [MPPLandmark landmarkWithProto:landmarkProto];
+      [worldLandmarks addObject:landmark];
+    }
+    [multiHandWorldLandmarks
+        addObject:worldLandmarks];
+  }
+
+  NSMutableArray<NSMutableArray<MPPCategory *> *> *multiHandHandedness =
+      [NSMutableArray arrayWithCapacity:(NSUInteger)handednessProto.size()];
+
+  for (const auto &classificationListProto : handednessProto) {
+    NSMutableArray<MPPCategory *> *handedness = [NSMutableArray
+        arrayWithCapacity:(NSUInteger)classificationListProto.classification().size()];
+    for (const auto &classificationProto : classificationListProto.classification()) {
+      MPPCategory *category = [MPPCategory categoryWithProto:classificationProto];
+      [handedness addObject:category];
+    }
+    [multiHandHandedness addObject:handedness];
+  }
+
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [[MPPHandLandmarkerResult alloc] initWithLandmarks:multiHandLandmarks
+                                           worldLandmarks:multiHandWorldLandmarks
+                                               handedness:multiHandHandedness
+                                  timestampInMilliseconds:timestampInMilliseconds];
+  return handLandmarkerResult;
+}
+
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultWithLandmarksPacket:(const Packet &)landmarksPacket
+                       worldLandmarksPacket:(const Packet &)worldLandmarksPacket
+                           handednessPacket:(const Packet &)handednessPacket {
+  NSInteger timestampInMilliseconds =
+      (NSInteger)(landmarksPacket.Timestamp().Value() / kMicroSecondsPerMilliSecond);
+
+  if (landmarksPacket.IsEmpty()) {
+    return [MPPHandLandmarkerResult
+        emptyHandLandmarkerResultWithTimestampInMilliseconds:timestampInMilliseconds];
+  }
+
+  if (!handednessPacket.ValidateAsType<std::vector<ClassificationListProto>>().ok() ||
+      !landmarksPacket.ValidateAsType<std::vector<NormalizedLandmarkListProto>>().ok() ||
+      !worldLandmarksPacket.ValidateAsType<std::vector<LandmarkListProto>>().ok()) {
+    return [MPPHandLandmarkerResult
+        emptyHandLandmarkerResultWithTimestampInMilliseconds:timestampInMilliseconds];
+  }
+
+  return [MPPHandLandmarkerResult
+      handLandmarkerResultWithLandmarksProto:landmarksPacket
+                                                 .Get<std::vector<NormalizedLandmarkListProto>>()
+                         worldLandmarksProto:worldLandmarksPacket
+                                                 .Get<std::vector<LandmarkListProto>>()
+                             handednessProto:handednessPacket
+                                                 .Get<std::vector<ClassificationListProto>>()
+                     timestampInMilliSeconds:timestampInMilliseconds];
+}
+
+@end
From ad499c170afd6e450ec1cc2872328b656efc9be9 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 31 May 2023 20:43:24 +0530
Subject: [PATCH 011/106] Added MPPConnection

---
 .../tasks/ios/components/containers/BUILD    |  6 +++
 .../containers/sources/MPPConnection.h       | 44 +++++++++++++++++++
 .../containers/sources/MPPConnection.m       | 28 ++++++++++++
 3 files changed, 78 insertions(+)
 create mode 100644 mediapipe/tasks/ios/components/containers/sources/MPPConnection.h
 create mode 100644 mediapipe/tasks/ios/components/containers/sources/MPPConnection.m

diff --git a/mediapipe/tasks/ios/components/containers/BUILD b/mediapipe/tasks/ios/components/containers/BUILD
index 9ad5c22fd..0477d288a 100644
--- a/mediapipe/tasks/ios/components/containers/BUILD
+++ b/mediapipe/tasks/ios/components/containers/BUILD
@@ -60,3 +60,9 @@ objc_library(
     srcs = ["sources/MPPLandmark.m"],
     hdrs = ["sources/MPPLandmark.h"],
 )
+
+objc_library(
+    name = "MPPConnection",
+    srcs = ["sources/MPPConnection.m"],
+    hdrs = ["sources/MPPConnection.h"],
+)
diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPConnection.h b/mediapipe/tasks/ios/components/containers/sources/MPPConnection.h
new file mode 100644
index 000000000..923599ee2
--- /dev/null
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPConnection.h
@@ -0,0 +1,44 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** The value class representing a landmark connection. */
+NS_SWIFT_NAME(Connection)
+@interface MPPConnection : NSObject
+
+@property(nonatomic, readonly) NSInteger start;
+
+@property(nonatomic, readonly) NSInteger end;
+
+/**
+ * Initializes a new `MPPConnection` with the given start and end landmark indices.
+ *
+ * @param start The integer representing the starting landmark of the connection.
+ * @param end The integer representing the ending landmark of the connection.
+ *
+ * @return An instance of `MPPConnection` initialized with the given start and end landmark
+ * indices.
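+ *
+ * A minimal usage sketch; `MPPHandLandmarkWrist` and `MPPHandLandmarkThumbCMC` come from the hand
+ * landmarker's `MPPHandLandmark` enum and are used here purely for illustration:
+ *
+ * @code
+ * // Connect the wrist landmark to the first thumb landmark.
+ * MPPConnection *wristToThumb = [[MPPConnection alloc] initWithStart:MPPHandLandmarkWrist
+ *                                                                end:MPPHandLandmarkThumbCMC];
+ * @endcode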
+ */
+- (instancetype)initWithStart:(NSInteger)start end:(NSInteger)end NS_DESIGNATED_INITIALIZER;
+
+- (instancetype)init NS_UNAVAILABLE;
+
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPConnection.m b/mediapipe/tasks/ios/components/containers/sources/MPPConnection.m
new file mode 100644
index 000000000..803ca4ca8
--- /dev/null
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPConnection.m
@@ -0,0 +1,28 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
+
+@implementation MPPConnection
+
+- (instancetype)initWithStart:(NSInteger)start end:(NSInteger)end {
+  self = [super init];
+  if (self) {
+    _start = start;
+    _end = end;
+  }
+  return self;
+}
+
+@end
From 0c33601510482f255cbbd5abfd7f9f4d46a31e15 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 31 May 2023 20:44:25 +0530
Subject: [PATCH 012/106] Added MPPHandLandmarker

---
 .../tasks/ios/vision/hand_landmarker/BUILD    |  18 ++
 .../sources/MPPHandLandmarker.h               | 181 ++++++++++++++++++
 2 files changed, 199 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h

diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD
index 2665142af..482fae1d8 100644
--- a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD
@@ -37,3 +37,21 @@ objc_library(
         "//mediapipe/tasks/ios/vision/core:MPPRunningMode",
     ],
 )
+
+objc_library(
+    name = "MPPHandLandmarker",
+    hdrs = ["sources/MPPHandLandmarker.h"],
+    copts = [
+        "-ObjC++",
+        "-std=c++17",
+        "-x objective-c++",
+    ],
+    module_name = "MPPHandLandmarker",
+    deps = [
+        ":MPPHandLandmarkerOptions",
+        ":MPPHandLandmarkerResult",
+        "//mediapipe/tasks/ios/components/containers:MPPConnection",
+        "//mediapipe/tasks/ios/vision/core:MPPImage",
+    ],
+)
+
diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h
new file mode 100644
index 000000000..4135b647f
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h
@@ -0,0 +1,181 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerOptions.h"
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @brief Performs hand landmarks detection on images.
+ *
+ * This API expects a pre-trained hand landmarks model asset bundle.
+ */
+NS_SWIFT_NAME(HandLandmarker)
+@interface MPPHandLandmarker : NSObject
+
+/** The connections between the landmarks in the palm. */
+@property(nonatomic, readonly) NSArray<MPPConnection *> *handPalmConnections;
+
+/** The connections between the landmarks in the index finger. */
+@property(nonatomic, readonly) NSArray<MPPConnection *> *handIndexFingerConnections;
+
+/** The connections between the landmarks in the middle finger. */
+@property(nonatomic, readonly) NSArray<MPPConnection *> *handMiddleFingerConnections;
+
+/** The connections between the landmarks in the ring finger. */
+@property(nonatomic, readonly) NSArray<MPPConnection *> *handRingFingerConnections;
+
+/** The connections between the landmarks in the pinky. */
+@property(nonatomic, readonly) NSArray<MPPConnection *> *handPinkyConnections;
+
+/** The connections between all the landmarks in the hand. */
+@property(nonatomic, readonly) NSArray<MPPConnection *> *handConnections;
+
+/**
+ * Creates a new instance of `MPPHandLandmarker` from an absolute path to a model asset bundle
+ * stored locally on the device and the default `MPPHandLandmarkerOptions`.
+ *
+ * @param modelPath An absolute path to a model asset bundle stored locally on the device.
+ * @param error An optional error parameter populated when there is an error in initializing the
+ * hand landmarker.
+ *
+ * @return A new instance of `MPPHandLandmarker` with the given model path. `nil` if there is an
+ * error in initializing the hand landmarker.
+ */
+- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
+
+/**
+ * Creates a new instance of `MPPHandLandmarker` from the given `MPPHandLandmarkerOptions`.
+ *
+ * @param options The options of type `MPPHandLandmarkerOptions` to use for configuring the
+ * `MPPHandLandmarker`.
+ * @param error An optional error parameter populated when there is an error in initializing the
+ * hand landmarker.
+ *
+ * @return A new instance of `MPPHandLandmarker` with the given options. `nil` if there is an
+ * error in initializing the hand landmarker.
+ */
+- (nullable instancetype)initWithOptions:(MPPHandLandmarkerOptions *)options
+                                   error:(NSError **)error NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Performs hand landmarks detection on the provided `MPPImage` using the whole image as region of
+ * interest. Rotation will be applied according to the `orientation` property of the provided
+ * `MPPImage`. Only use this method when the `MPPHandLandmarker` is created with
+ * `MPPRunningModeImage`.
+ *
+ * This method supports performing hand landmarks detection on RGBA images. If your `MPPImage` has
+ * a source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
+ * underlying pixel buffer must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
+ * RGB with an Alpha channel.
+ *
+ * @param image The `MPPImage` on which hand landmarks detection is to be performed.
+ * @param error An optional error parameter populated when there is an error in performing hand
+ * landmarks detection on the input image.
+ *
+ * @return An `MPPHandLandmarkerResult` object that contains the hand landmarks detection
+ * results.
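+ *
+ * A minimal image-mode sketch using only the initializer and method declared in this header
+ * (`modelPath` and `image` are placeholder values):
+ *
+ * @code
+ * NSError *error = nil;
+ * MPPHandLandmarker *handLandmarker = [[MPPHandLandmarker alloc] initWithModelPath:modelPath
+ *                                                                            error:&error];
+ * MPPHandLandmarkerResult *result = [handLandmarker detectInImage:image error:&error];
+ * @endcode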
+ */
+- (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image
+                                               error:(NSError **)error
+    NS_SWIFT_NAME(detect(image:));
+
+/**
+ * Performs hand landmarks detection on the provided video frame of type `MPPImage` using the whole
+ * image as region of interest. Rotation will be applied according to the `orientation` property of
+ * the provided `MPPImage`. Only use this method when the `MPPHandLandmarker` is created with
+ * `MPPRunningModeVideo`.
+ *
+ * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps
+ * must be monotonically increasing.
+ *
+ * This method supports performing hand landmarks detection on RGBA images. If your `MPPImage` has
+ * a source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
+ * underlying pixel buffer must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
+ * RGB with an Alpha channel.
+ *
+ * @param image The `MPPImage` on which hand landmarks detection is to be performed.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
+ * @param error An optional error parameter populated when there is an error in performing hand
+ * landmarks detection on the input video frame.
+ *
+ * @return An `MPPHandLandmarkerResult` object that contains the hand landmarks detection
+ * results.
+ */
+- (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                                     error:(NSError **)error
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
+
+/**
+ * Sends live stream image data of type `MPPImage` to perform hand landmarks detection using the
+ * whole image as region of interest. Rotation will be applied according to the `orientation`
+ * property of the provided `MPPImage`. Only use this method when the `MPPHandLandmarker` is
+ * created with `MPPRunningModeLiveStream`.
+ *
+ * The object which needs to be continuously notified of the available results of hand landmarks
+ * detection must conform to the `MPPHandLandmarkerLiveStreamDelegate` protocol and implement the
+ * `handLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:`
+ * delegate method.
+ *
+ * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
+ * to the hand landmarker. The input timestamps must be monotonically increasing.
+ *
+ * This method supports performing hand landmarks detection on RGBA images. If your `MPPImage` has
+ * a source type of `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the
+ * underlying pixel buffer must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
+ * space is RGB with an Alpha channel.
+ *
+ * If this method is used for performing hand landmarks detection on live camera frames using
+ * `AVFoundation`, ensure that you request `AVCaptureVideoDataOutput` to output frames in
+ * `kCMPixelFormat_32RGBA` using its `videoSettings` property.
+ *
+ * @param image A live stream image data of type `MPPImage` on which hand landmarks detection is to
+ * be performed.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the hand landmarker. The input timestamps must be monotonically increasing.
+ * @param error An optional error parameter populated when there is an error in performing hand
+ * landmarks detection on the input live stream image data.
+ *
+ * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
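+ *
+ * A minimal live-stream sketch (`image` and `frameTimestampMs` are placeholders; the
+ * `runningMode` and `handLandmarkerLiveStreamDelegate` property names are assumptions based on the
+ * accompanying options class, which is not part of this header):
+ *
+ * @code
+ * MPPHandLandmarkerOptions *options = [[MPPHandLandmarkerOptions alloc] init];
+ * options.runningMode = MPPRunningModeLiveStream;
+ * options.handLandmarkerLiveStreamDelegate = self;  // Conforms to MPPHandLandmarkerLiveStreamDelegate.
+ *
+ * MPPHandLandmarker *handLandmarker = [[MPPHandLandmarker alloc] initWithOptions:options
+ *                                                                          error:nil];
+ * // For each camera frame, with a monotonically increasing timestamp:
+ * [handLandmarker detectAsyncInImage:image timestampInMilliseconds:frameTimestampMs error:nil];
+ * @endcode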
+ */
+- (BOOL)detectAsyncInImage:(MPPImage *)image
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                      error:(NSError **)error
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
+
+- (instancetype)init NS_UNAVAILABLE;
+
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
From 4326c97c95dddf4720d64758db3b861bcda6e0eb Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Wed, 31 May 2023 20:44:35 +0530
Subject: [PATCH 013/106] Added MPPHandLandmark

---
 .../tasks/ios/vision/hand_landmarker/BUILD    |  5 ++
 .../hand_landmarker/sources/MPPHandLandmark.h | 65 +++++++++++++++++++
 2 files changed, 70 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h

diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD
index 482fae1d8..6c815f28d 100644
--- a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD
@@ -55,3 +55,8 @@ objc_library(
     ],
 )
 
+objc_library(
+    name = "MPPHandLandmark",
+    hdrs = ["sources/MPPHandLandmark.h"],
+    module_name = "MPPHandLandmark",
+)
diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h
new file mode 100644
index 000000000..fe08bde7e
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmark.h
@@ -0,0 +1,65 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * The enum containing the 21 hand landmarks.
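+ *
+ * Each case is, by assumption, the index of the corresponding landmark in a single hand's
+ * 21-element landmark array, e.g. `landmarks[MPPHandLandmarkWrist]`.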
+ */ +typedef NS_ENUM(NSUInteger, MPPHandLandmark) { + MPPHandLandmarkWrist, + + MPPHandLandmarkThumbCMC, + + MPPHandLandmarkThumbMCP, + + MPPHandLandmarkThumbIP, + + MPPHandLandmarkIndexFingerMCP, + + MPPHandLandmarkIndexFingerPIP, + + MPPHandLandmarkIndexFingerDIP, + + MPPHandLandmarkIndexFingerTIP, + + MPPHandLandmarkMiddleFingerMCP, + + MPPHandLandmarkMiddleFingerPIP, + + MPPHandLandmarkMiddleFingerDIP, + + MPPHandLandmarkMiddleFingerTIP, + + MPPHandLandmarkRingFingerMCP, + + MPPHandLandmarkRingFingerPIP, + + MPPHandLandmarkRingFingerDIP, + + MPPHandLandmarkRingFingerTIP, + + MPPHandLandmarkPinkyMCP, + + MPPHandLandmarkPinkyPIP, + + MPPHandLandmarkPinkyDIP, + + MPPHandLandmarkPinkyTIP, + +} NS_SWIFT_NAME(HandLandmark); + +NS_ASSUME_NONNULL_END From ebeffc27eb082ebed06520d3a72951c80f21f06d Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 20:51:43 +0530 Subject: [PATCH 014/106] Renamed iOS gesture recognizer protobuf utils --- mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD | 2 +- .../gesture_recognizer/MPPGestureRecognizerTests.m | 9 +-------- .../tasks/ios/test/vision/gesture_recognizer/utils/BUILD | 6 +++--- ...rs.h => MPPGestureRecognizerResult+ProtobufHelpers.h} | 2 +- ....mm => MPPGestureRecognizerResult+ProtobufHelpers.mm} | 2 +- 5 files changed, 7 insertions(+), 14 deletions(-) rename mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/{MPPGestureRecognizerResult+ProtoHelpers.h => MPPGestureRecognizerResult+ProtobufHelpers.h} (95%) rename mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/{MPPGestureRecognizerResult+ProtoHelpers.mm => MPPGestureRecognizerResult+ProtobufHelpers.mm} (98%) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD b/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD index 5be17a26c..6b53c9005 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/BUILD @@ -42,7 +42,7 @@ objc_library( "//mediapipe/tasks/ios/common:MPPCommon", "//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils", "//mediapipe/tasks/ios/vision/gesture_recognizer:MPPGestureRecognizer", - "//mediapipe/tasks/ios/test/vision/gesture_recognizer/utils:MPPGestureRecognizerResultProtoHelpers", + "//mediapipe/tasks/ios/test/vision/gesture_recognizer/utils:MPPGestureRecognizerResultProtobufHelpers", ] + select({ "//third_party:opencv_ios_sim_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], "//third_party:opencv_ios_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 1a48322b4..2fb2c8a5b 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -15,7 +15,7 @@ #import #import "mediapipe/tasks/ios/common/sources/MPPCommon.h" -#import "mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h" +#import "mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h" #import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h" #import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h" @@ -64,13 +64,6 @@ static const float kLandmarksErrorTolerance = 0.03f; 
XCTAssertEqualWithAccuracy(landmark.y, expectedLandmark.y, kLandmarksErrorTolerance, \ @"hand index = %d landmark index j = %d", handIndex, landmarkIndex); -#define AssertApproximatelyEqualMultiHandLandmarks(multiHandLandmarks, expectedMultiHandLandmars) \ - XCTAssertEqual(multiHandLandmarks.count, expectedMultiHandLandmars.count) \ - XCTAssertEqualWithAccuracy(landmark.x, expectedLandmark.x, kLandmarksErrorTolerance, \ - @"hand index = %d landmark index j = %d", handIndex, handIndex); \ - XCTAssertEqualWithAccuracy(landmark.y, expectedLandmark.y, kLandmarksErrorTolerance, \ - @"hand index = %d landmark index j = %d", handIndex, handIndex); - #define AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult) \ XCTAssertTrue(gestureRecognizerResult.gestures.count == 0); \ XCTAssertTrue(gestureRecognizerResult.handedness.count == 0); \ diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD index ddac21ed2..584d3e441 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/BUILD @@ -3,9 +3,9 @@ package(default_visibility = ["//mediapipe/tasks:internal"]) licenses(["notice"]) objc_library( - name = "MPPGestureRecognizerResultProtoHelpers", - srcs = ["sources/MPPGestureRecognizerResult+ProtoHelpers.mm"], - hdrs = ["sources/MPPGestureRecognizerResult+ProtoHelpers.h"], + name = "MPPGestureRecognizerResultProtobufHelpers", + srcs = ["sources/MPPGestureRecognizerResult+ProtobufHelpers.mm"], + hdrs = ["sources/MPPGestureRecognizerResult+ProtobufHelpers.h"], copts = [ "-ObjC++", "-std=c++17", diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h similarity index 95% rename from mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h rename to mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h index 6bb2e5182..cfa0a5e53 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h @@ -16,7 +16,7 @@ #import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizerResult.h" NS_ASSUME_NONNULL_BEGIN -@interface MPPGestureRecognizerResult (ProtoHelpers) +@interface MPPGestureRecognizerResult (ProtobufHelpers) + (MPPGestureRecognizerResult *) gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm similarity index 98% rename from mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm rename to mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm index ce3a262b8..a4f6de5e1 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.mm +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm 
@@ -29,7 +29,7 @@ using LandmarksDetectionResultProto = using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt; } // anonymous namespace -@implementation MPPGestureRecognizerResult (ProtoHelpers) +@implementation MPPGestureRecognizerResult (ProtobufHelpers) + (MPPGestureRecognizerResult *) gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName From 77bb5e72024f5906b5d1e1eeb3b7202aade93b48 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 31 May 2023 20:53:36 +0530 Subject: [PATCH 015/106] Fixed import in iOS gesture recognizer test utils --- .../utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm index a4f6de5e1..7cb1de12d 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#import "mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtoHelpers.h" +#import "mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h" #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h" From 961afc8928a3f5da82a00d5024a441f5a7925aa2 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 1 Jun 2023 16:43:33 +0530 Subject: [PATCH 016/106] Updated documentation in MPPHandLandmarkerResult Helpers --- .../utils/sources/MPPHandLandmarkerResult+Helpers.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h index fa7bd42ce..dea8a7d4d 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h @@ -41,7 +41,7 @@ static const int kMicroSecondsPerMilliSecond = 1000; handednessPacket:(const mediapipe::Packet &)handednessPacket; /** - * Creates an `MPPHandLandmarkerResult` from handedness, landmarks and world landmarks proto + * Creates an `MPPHandLandmarkerResult` from landmarks, world landmarks and handedness proto * vectors. * * @param landmarksProto A vector of protos of type `std::vector`. 
From e2f899e15141dd7d5f42ab0787c79aa78919108e Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 1 Jun 2023 16:44:57 +0530 Subject: [PATCH 017/106] Updated documentation in MPPHandLandmarkResult+Helpers.h --- .../utils/sources/MPPHandLandmarkerResult+Helpers.h | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h index dea8a7d4d..51099d341 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h @@ -27,10 +27,9 @@ static const int kMicroSecondsPerMilliSecond = 1000; /** * Creates an `MPPHandLandmarkerResult` from landmarks, world landmarks and handedness packets. * - * @param landmarksPacket A MediaPipe packet wrapping - * a`std::vector`. - * @param worldLandmarksPacket A MediaPipe packet wrapping a`std::vector`. - * @param handednessPacket a MediaPipe packet wrapping a`std::vector`. + * @param landmarksPacket A MediaPipe packet wrapping a `std::vector`. + * @param worldLandmarksPacket A MediaPipe packet wrapping a `std::vector`. + * @param handednessPacket a MediaPipe packet wrapping a `std::vector`. * * @return An `MPPHandLandmarkerResult` object that contains the hand landmark detection * results. From 9356dfcd4635c926e8af5e5b61bad643f74ea5a9 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 1 Jun 2023 16:53:32 +0530 Subject: [PATCH 018/106] Updated MPPHandLandmarker.h to return the hand connections via class mathods --- .../sources/MPPHandLandmarker.h | 60 +++++++++++++------ 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h index 4135b647f..5149ec0ac 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h @@ -29,24 +29,6 @@ NS_ASSUME_NONNULL_BEGIN NS_SWIFT_NAME(HandLandmarker) @interface MPPHandLandmarker : NSObject -/** The connections between the landmarks in the palm. */ -@property(nonatomic, readonly) NSArray *handPalmConnections; - -/** The connections between the landmarks in the index finger. */ -@property(nonatomic, readonly) NSArray *handIndexFingerConnections; - -/** The connections between the landmarks in the middle finger. */ -@property(nonatomic, readonly) NSArray *handMiddleFingerConnections; - -/** The connections between the landmarks in the ring finger. */ -@property(nonatomic, readonly) NSArray *handRingFingerConnections; - -/** The connections between the landmarks in the pinky. */ -@property(nonatomic, readonly) NSArray *handPinkyConnections; - -/** The connections between all the landmarks in the hand. */ -@property(nonatomic, readonly) NSArray *handConnections; - /** * Creates a new instance of `MPPHandLandmarker` from an absolute path to a model asset bundle * stored locally on the device and the default `MPPHandLandmarkerOptions`. @@ -174,6 +156,48 @@ NS_SWIFT_NAME(HandLandmarker) - (instancetype)init NS_UNAVAILABLE; +/** + * Returns the connections between the landmarks in the palm. + * + * @return An array of connections between the landmarks in the palm. 
+ */
++ (NSArray<MPPConnection *> *)handPalmConnections;
+
+/**
+ * Returns the connections between the landmarks in the index finger.
+ *
+ * @return An array of connections between the landmarks in the index finger.
+ */
++ (NSArray<MPPConnection *> *)handIndexFingerConnections;
+
+/**
+ * Returns the connections between the landmarks in the middle finger.
+ *
+ * @return An array of connections between the landmarks in the middle finger.
+ */
++ (NSArray<MPPConnection *> *)handMiddleFingerConnections;
+
+/**
+ * Returns the connections between the landmarks in the ring finger.
+ *
+ * @return An array of connections between the landmarks in the ring finger.
+ */
++ (NSArray<MPPConnection *> *)handRingFingerConnections;
+
+/**
+ * Returns the connections between the landmarks in the pinky.
+ *
+ * @return An array of connections between the landmarks in the pinky.
+ */
++ (NSArray<MPPConnection *> *)handPinkyConnections;
+
+/**
+ * Returns the connections between all the landmarks in the hand.
+ *
+ * @return An array of connections between all the landmarks in the hand.
+ */
++ (NSArray<MPPConnection *> *)handConnections;
+
 + (instancetype)new NS_UNAVAILABLE;
 
 @end
From e2f899e15141dd7d5f42ab0787c79aa78919108e Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Mon, 5 Jun 2023 13:19:39 +0530
Subject: [PATCH 019/106] Update iOS Gesture Recognizer error assertion

---
 .../vision/gesture_recognizer/MPPGestureRecognizerTests.m | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
index 2fb2c8a5b..6bde8166f 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
@@ -48,10 +48,8 @@ static const float kLandmarksErrorTolerance = 0.03f;
   XCTAssertNotNil(error);                                                    \
   XCTAssertEqualObjects(error.domain, expectedError.domain);                 \
   XCTAssertEqual(error.code, expectedError.code);                            \
-  XCTAssertNotEqual(                                                         \
-      [error.localizedDescription rangeOfString:expectedError.localizedDescription].location, \
-      NSNotFound)
-
+  XCTAssertEqualObjects( error.localizedDescription, expectedError.localizedDescription)
+
 #define AssertEqualGestures(gesture, expectedGesture, handIndex, gestureIndex) \
   XCTAssertEqual(gesture.index, kGestureExpectedIndex, @"hand index = %d gesture index j = %d", \
                  handIndex, gestureIndex);                                      \
From 56a035cb1bfddf1e123be05f90cf17976bdbf94b Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Mon, 5 Jun 2023 13:21:39 +0530
Subject: [PATCH 020/106] Updated method names in MPPGestureRecognizer

---
 .../gesture_recognizer/MPPGestureRecognizerTests.m | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
index 6bde8166f..43d4b6e72 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
@@ -49,7 +49,7 @@ static const float kLandmarksErrorTolerance = 0.03f;
   XCTAssertEqualObjects(error.domain, expectedError.domain);                 \
   XCTAssertEqual(error.code, expectedError.code);                            \
   XCTAssertEqualObjects( error.localizedDescription, expectedError.localizedDescription)
-
+
 #define AssertEqualGestures(gesture, expectedGesture, handIndex, gestureIndex) \
   XCTAssertEqual(gesture.index, kGestureExpectedIndex, @"hand index = %d
gesture index j = %d", \ handIndex, gestureIndex); \ @@ -103,7 +103,7 @@ static const float kLandmarksErrorTolerance = 0.03f; } - (void)assertMultiHandLandmarks:(NSArray *> *)multiHandLandmarks - isApproximatelyEqualToExpectedMultiHandLandmarks: + areApproximatelyEqualToExpectedMultiHandLandmarks: (NSArray *> *)expectedMultiHandLandmarks { XCTAssertEqual(multiHandLandmarks.count, expectedMultiHandLandmarks.count); if (multiHandLandmarks.count == 0) { @@ -122,7 +122,7 @@ static const float kLandmarksErrorTolerance = 0.03f; } - (void)assertMultiHandWorldLandmarks:(NSArray *> *)multiHandWorldLandmarks - isApproximatelyEqualToExpectedMultiHandWorldLandmarks: + areApproximatelyEqualToExpectedMultiHandWorldLandmarks: (NSArray *> *)expectedMultiHandWorldLandmarks { XCTAssertEqual(multiHandWorldLandmarks.count, expectedMultiHandWorldLandmarks.count); if (expectedMultiHandWorldLandmarks.count == 0) { @@ -141,7 +141,7 @@ static const float kLandmarksErrorTolerance = 0.03f; } - (void)assertMultiHandGestures:(NSArray *> *)multiHandGestures - isApproximatelyEqualToExpectedMultiHandGestures: + areApproximatelyEqualToExpectedMultiHandGestures: (NSArray *> *)expectedMultiHandGestures { XCTAssertEqual(multiHandGestures.count, expectedMultiHandGestures.count); if (multiHandGestures.count == 0) { @@ -163,12 +163,12 @@ static const float kLandmarksErrorTolerance = 0.03f; isApproximatelyEqualToExpectedResult: (MPPGestureRecognizerResult *)expectedGestureRecognizerResult { [self assertMultiHandLandmarks:gestureRecognizerResult.landmarks - isApproximatelyEqualToExpectedMultiHandLandmarks:expectedGestureRecognizerResult.landmarks]; + areApproximatelyEqualToExpectedMultiHandLandmarks:expectedGestureRecognizerResult.landmarks]; [self assertMultiHandWorldLandmarks:gestureRecognizerResult.worldLandmarks - isApproximatelyEqualToExpectedMultiHandWorldLandmarks:expectedGestureRecognizerResult + areApproximatelyEqualToExpectedMultiHandWorldLandmarks:expectedGestureRecognizerResult .worldLandmarks]; [self assertMultiHandGestures:gestureRecognizerResult.gestures - isApproximatelyEqualToExpectedMultiHandGestures:expectedGestureRecognizerResult.gestures]; + areApproximatelyEqualToExpectedMultiHandGestures:expectedGestureRecognizerResult.gestures]; } #pragma mark File From db0da30f187be47ad736d7b8c26d2947cbd91753 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 5 Jun 2023 13:26:17 +0530 Subject: [PATCH 021/106] Updated comments --- .../sources/MPPGestureRecognizerResult+ProtobufHelpers.mm | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm index 7cb1de12d..6bbade941 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm @@ -42,9 +42,8 @@ using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt; } if (removeZPosition) { - // Remove z position of landmarks, because they are not used in correctness - // testing. For video or live stream mode, the z positions varies a lot during - // tracking from frame to frame. + // Remove z position of landmarks, because they are not used in correctness testing. 
For video
+      // or live stream mode, the z positions vary a lot during tracking from frame to frame.
       for (int i = 0; i < landmarkDetectionResultProto.landmarks().landmark().size(); i++) {
         auto &landmark = *landmarkDetectionResultProto.mutable_landmarks()->mutable_landmark(i);
         landmark.clear_z();
From f213e0a6f3618c86acf49c83dbe85c306f656280 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Mon, 5 Jun 2023 13:39:44 +0530
Subject: [PATCH 022/106] Updated MPPGestureRecognizerTests to use generics for file path dicts

---
 .../MPPGestureRecognizerTests.m               | 23 +++++++++++--------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
index 43d4b6e72..4331c5561 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
@@ -19,20 +19,25 @@
 #import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/sources/MPPGestureRecognizer.h"
 
+
+static NSString *const kPbFileExtension = @"pbtxt";
+
+typedef NSDictionary<NSString *, NSString *> ResourceFileInfo;
+
 static NSDictionary *const kGestureRecognizerBundleAssetFile =
     @{@"name" : @"gesture_recognizer", @"type" : @"task"};
 
-static NSDictionary *const kTwoHandsImage = @{@"name" : @"right_hands", @"type" : @"jpg"};
-static NSDictionary *const kFistImage = @{@"name" : @"fist", @"type" : @"jpg"};
-static NSDictionary *const kNoHandsImage = @{@"name" : @"cats_and_dogs", @"type" : @"jpg"};
-static NSDictionary *const kThumbUpImage = @{@"name" : @"thumb_up", @"type" : @"jpg"};
-static NSDictionary *const kPointingUpRotatedImage =
+static ResourceFileInfo *const kTwoHandsImage = @{@"name" : @"right_hands", @"type" : @"jpg"};
+static ResourceFileInfo *const kFistImage = @{@"name" : @"fist", @"type" : @"jpg"};
+static ResourceFileInfo *const kNoHandsImage = @{@"name" : @"cats_and_dogs", @"type" : @"jpg"};
+static ResourceFileInfo *const kThumbUpImage = @{@"name" : @"thumb_up", @"type" : @"jpg"};
+static ResourceFileInfo *const kPointingUpRotatedImage =
     @{@"name" : @"pointing_up_rotated", @"type" : @"jpg"};
 
-static NSDictionary *const kExpectedFistLandmarksFile =
-    @{@"name" : @"fist_landmarks", @"type" : @"pbtxt"};
-static NSDictionary *const kExpectedThumbUpLandmarksFile =
-    @{@"name" : @"thumb_up_landmarks", @"type" : @"pbtxt"};
+static ResourceFileInfo *const kExpectedFistLandmarksFile =
+    @{@"name" : @"fist_landmarks", @"type" : kPbFileExtension};
+static ResourceFileInfo *const kExpectedThumbUpLandmarksFile =
+    @{@"name" : @"thumb_up_landmarks", @"type" : kPbFileExtension};
 
 static NSString *const kFistLabel = @"Closed_Fist";
 static NSString *const kExpectedThumbUpLabel = @"Thumb_Up";
From 1496b7c2d447297bc754c24d0b250659083e48f9 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Mon, 5 Jun 2023 13:40:56 +0530
Subject: [PATCH 023/106] Updated MPPGestureRecognizer tests to use generics

---
 .../gesture_recognizer/MPPGestureRecognizerTests.m | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
index 4331c5561..15b285b82 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
+++
b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -178,7 +178,7 @@ static const float kLandmarksErrorTolerance = 0.03f; #pragma mark File -+ (NSString *)filePathWithFileInfo:(NSDictionary *)fileInfo { ++ (NSString *)filePathWithFileInfo:(ResourceFileInfo *)fileInfo { NSString *filePath = [MPPGestureRecognizerTests filePathWithName:fileInfo[@"name"] extension:fileInfo[@"type"]]; return filePath; @@ -193,7 +193,7 @@ static const float kLandmarksErrorTolerance = 0.03f; #pragma mark Gesture Recognizer Initializers - (MPPGestureRecognizerOptions *)gestureRecognizerOptionsWithModelFileInfo: - (NSDictionary *)modelFileInfo { + (ResourceFileInfo *)modelFileInfo { NSString *modelPath = [MPPGestureRecognizerTests filePathWithFileInfo:modelFileInfo]; MPPGestureRecognizerOptions *gestureRecognizerOptions = [[MPPGestureRecognizerOptions alloc] init]; @@ -224,7 +224,7 @@ static const float kLandmarksErrorTolerance = 0.03f; #pragma mark Assert Gesture Recognizer Results -- (MPPImage *)imageWithFileInfo:(NSDictionary *)fileInfo { +- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPGestureRecognizerTests class] fileName:fileInfo[@"name"] ofType:fileInfo[@"type"]]; @@ -233,7 +233,7 @@ static const float kLandmarksErrorTolerance = 0.03f; return image; } -- (MPPImage *)imageWithFileInfo:(NSDictionary *)fileInfo +- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo orientation:(UIImageOrientation)orientation { MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPGestureRecognizerTests class] fileName:fileInfo[@"name"] @@ -244,7 +244,7 @@ static const float kLandmarksErrorTolerance = 0.03f; return image; } -- (MPPGestureRecognizerResult *)recognizeImageWithFileInfo:(NSDictionary *)imageFileInfo +- (MPPGestureRecognizerResult *)recognizeImageWithFileInfo:(ResourceFileInfo *)imageFileInfo usingGestureRecognizer: (MPPGestureRecognizer *)gestureRecognizer { MPPImage *mppImage = [self imageWithFileInfo:imageFileInfo]; @@ -255,7 +255,7 @@ static const float kLandmarksErrorTolerance = 0.03f; return gestureRecognizerResult; } -- (void)assertResultsOfRecognizeImageWithFileInfo:(NSDictionary *)fileInfo +- (void)assertResultsOfRecognizeImageWithFileInfo:(ResourceFileInfo *)fileInfo usingGestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer approximatelyEqualsGestureRecognizerResult: (MPPGestureRecognizerResult *)expectedGestureRecognizerResult { From f213e0a6f3618c86acf49c83dbe85c306f656280 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 5 Jun 2023 13:47:11 +0530 Subject: [PATCH 024/106] Fixed typos --- .../MPPGestureRecognizerResult+Helpers.h | 16 +++++++-------- .../MPPGestureRecognizerResult+Helpers.mm | 20 +++++++++---------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h index 5e75febf3..231bb16d8 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h @@ -58,14 +58,14 @@ static const int kMicroSecondsPerMilliSecond = 1000; + (MPPGestureRecognizerResult *) gestureRecognizerResultWithHandGesturesProto: (const std::vector &)handGesturesProto - handednessroto: - (const std::vector &) - handednessProto - handLandmarksPacket: - 
(const std::vector &)
-                             handLandmarksProto
-                        worldLandmarksPacket:
-                            (const std::vector &)worldLandmarksProto
+                            handednessProto:
+                                (const std::vector &)
+                                handednessProto
+                         handLandmarksProto:
+                             (const std::vector &)
+                             handLandmarksProto
+                        worldLandmarksProto:
+                            (const std::vector &)worldLandmarksProto
                     timestampInMilliSeconds:(NSInteger)timestampInMilliseconds;
 
 @end
diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm
index 5162dd891..4f3411c66 100644
--- a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm
+++ b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm
@@ -40,12 +40,12 @@ static const NSInteger kDefaultGestureIndex = -1;
 + (MPPGestureRecognizerResult *)
     gestureRecognizerResultWithHandGesturesProto:
         (const std::vector &)handGesturesProto
-                              handednessroto:
-                                  (const std::vector &)handednessProto
-                         handLandmarksPacket:(const std::vector &)
+                             handednessProto:
+                                 (const std::vector &)handednessProto
+                          handLandmarksProto:(const std::vector &)
                                                  handLandmarksProto
-                        worldLandmarksPacket:
-                            (const std::vector &)worldLandmarksProto
+                         worldLandmarksProto:
+                             (const std::vector &)worldLandmarksProto
                      timestampInMilliSeconds:(NSInteger)timestampInMilliseconds {
@@ -55,7 +55,7 @@ static const NSInteger kDefaultGestureIndex = -1;
         arrayWithCapacity:(NSUInteger)classificationListProto.classification().size()];
     for (const auto &classificationProto : classificationListProto.classification()) {
       MPPCategory *category = [MPPCategory categoryWithProto:classificationProto
-                                                        index:kDefaultGestureIndex];
+                                                       index:kDefaultGestureIndex];
       [gestures addObject:category];
     }
     [multiHandGestures addObject:gestures];
@@ -135,11 +135,11 @@ static const NSInteger kDefaultGestureIndex = -1;
   return [MPPGestureRecognizerResult
       gestureRecognizerResultWithHandGesturesProto:handGesturesPacket
                                                        .Get>()
-                                    handednessroto:handednessPacket
+                                   handednessProto:handednessPacket
                                                        .Get>()
-                               handLandmarksPacket:handLandmarksPacket
-                                                       .Get>()
-                              worldLandmarksPacket:worldLandmarksPacket
+                                handLandmarksProto:handLandmarksPacket.Get<
+                                                       std::vector>()
+                               worldLandmarksProto:worldLandmarksPacket
                                                        .Get>()
                            timestampInMilliSeconds:timestampInMilliseconds];
 }
From d256a3e67074f09562c54040da54216f39c04ada Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Mon, 5 Jun 2023 21:13:04 +0530
Subject: [PATCH 025/106] Updated dictionary to generics in iOS gesture recognizer tests

---
 .../test/vision/gesture_recognizer/MPPGestureRecognizerTests.m | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
index 15b285b82..55180f477 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m
@@ -24,7 +24,7 @@ static NSString *const kPbFileExtension = @"pbtxt";
 
 typedef NSDictionary<NSString *, NSString *> ResourceFileInfo;
 
-static NSDictionary *const kGestureRecognizerBundleAssetFile =
+static ResourceFileInfo *const kGestureRecognizerBundleAssetFile =
    @{@"name" : @"gesture_recognizer", @"type" : @"task"};
 
 static ResourceFileInfo *const kTwoHandsImage
= @{@"name" : @"right_hands", @"type" : @"jpg"}; From 709eb812cc2f593067f7fdc23a481390a70688e3 Mon Sep 17 00:00:00 2001 From: Fergus Henderson Date: Tue, 6 Jun 2023 09:58:18 -0700 Subject: [PATCH 026/106] Internal change PiperOrigin-RevId: 538215311 --- mediapipe/util/tflite/BUILD | 9 ++-- mediapipe/util/tflite/op_resolver.cc | 81 ++++++++++++++++++++-------- 2 files changed, 65 insertions(+), 25 deletions(-) diff --git a/mediapipe/util/tflite/BUILD b/mediapipe/util/tflite/BUILD index 59663c9ba..f31c23696 100644 --- a/mediapipe/util/tflite/BUILD +++ b/mediapipe/util/tflite/BUILD @@ -67,17 +67,20 @@ cc_library( ], ) -# TODO: Re-evaluate which of these libraries we can avoid making -# cc_library_with_tflite and can be changed back to cc_library. +# This target has an implementation dependency on TFLite/TFLite-in-GMSCore, +# but it does not have any API dependency on TFLite-in-GMSCore. cc_library_with_tflite( name = "op_resolver", srcs = ["op_resolver.cc"], hdrs = ["op_resolver.h"], tflite_deps = [ "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", + "@org_tensorflow//tensorflow/lite/c:c_api", + "@org_tensorflow//tensorflow/lite/c:c_api_experimental", # For c_api_opaque.h + "@org_tensorflow//tensorflow/lite/c:common", # For builtin_op_data.h ], deps = [ - "@org_tensorflow//tensorflow/lite:builtin_op_data", + "@org_tensorflow//tensorflow/lite:builtin_ops", ], ) diff --git a/mediapipe/util/tflite/op_resolver.cc b/mediapipe/util/tflite/op_resolver.cc index 23f066666..44eff4566 100644 --- a/mediapipe/util/tflite/op_resolver.cc +++ b/mediapipe/util/tflite/op_resolver.cc @@ -14,47 +14,84 @@ #include "mediapipe/util/tflite/op_resolver.h" -#include "tensorflow/lite/builtin_op_data.h" +#include "tensorflow/lite/builtin_ops.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/c_api.h" +#include "tensorflow/lite/c/c_api_opaque.h" namespace mediapipe { namespace { +constexpr char kMaxPoolingWithArgmax2DOpName[] = "MaxPoolingWithArgmax2D"; +constexpr int kMaxPoolingWithArgmax2DOpVersion = 1; + +constexpr char kMaxUnpooling2DOpName[] = "MaxUnpooling2D"; +constexpr int kMaxUnpooling2DOpVersion = 1; + +constexpr char kConvolution2DTransposeBiasOpName[] = + "Convolution2DTransposeBias"; +constexpr int kConvolution2DTransposeBiasOpVersion = 1; + TfLiteRegistration* RegisterMaxPoolingWithArgmax2D() { - static TfLiteRegistration reg = { - [](TfLiteContext*, const char*, size_t) -> void* { - return new TfLitePaddingValues(); - }, - [](TfLiteContext*, void* buffer) -> void { - delete reinterpret_cast(buffer); - }, - [](TfLiteContext* context, TfLiteNode* node) -> TfLiteStatus { - return kTfLiteOk; - }, - [](TfLiteContext* context, TfLiteNode*) -> TfLiteStatus { - context->ReportError( - context, "MaxPoolingWithArgmax2D is only available on the GPU."); - return kTfLiteError; - }, - }; + static TfLiteRegistrationExternal* reg_external = []() { + // Intentionally allocated and never destroyed. 
+ auto* r = TfLiteRegistrationExternalCreate( + kTfLiteBuiltinCustom, kMaxPoolingWithArgmax2DOpName, + kMaxPoolingWithArgmax2DOpVersion); + TfLiteRegistrationExternalSetInit( + r, [](TfLiteOpaqueContext*, const char*, size_t) -> void* { + return new TfLitePaddingValues(); + }); + TfLiteRegistrationExternalSetFree( + r, [](TfLiteOpaqueContext*, void* buffer) -> void { + delete reinterpret_cast(buffer); + }); + TfLiteRegistrationExternalSetPrepare( + r, + [](TfLiteOpaqueContext* context, + TfLiteOpaqueNode* node) -> TfLiteStatus { return kTfLiteOk; }); + TfLiteRegistrationExternalSetInvoke( + r, [](TfLiteOpaqueContext* context, TfLiteOpaqueNode*) -> TfLiteStatus { + TfLiteOpaqueContextReportError( + context, "MaxPoolingWithArgmax2D is only available on the GPU."); + return kTfLiteError; + }); + return r; + }(); + static TfLiteRegistration reg = {.registration_external = reg_external}; return ® } TfLiteRegistration* RegisterMaxUnpooling2D() { - static TfLiteRegistration reg = {nullptr, nullptr, nullptr, nullptr}; + static TfLiteRegistrationExternal* reg_external = + // Intentionally allocated and never destroyed. + TfLiteRegistrationExternalCreate(kTfLiteBuiltinCustom, + kMaxUnpooling2DOpName, + kMaxUnpooling2DOpVersion); + static TfLiteRegistration reg = {.registration_external = reg_external}; return ® } TfLiteRegistration* RegisterConvolution2DTransposeBias() { - static TfLiteRegistration reg = {nullptr, nullptr, nullptr, nullptr}; + static TfLiteRegistrationExternal* reg_external = + // Intentionally allocated and never destroyed. + TfLiteRegistrationExternalCreate(kTfLiteBuiltinCustom, + kConvolution2DTransposeBiasOpName, + kConvolution2DTransposeBiasOpVersion); + static TfLiteRegistration reg = {.registration_external = reg_external}; return ® } } // namespace OpResolver::OpResolver() { - AddCustom("MaxPoolingWithArgmax2D", RegisterMaxPoolingWithArgmax2D()); - AddCustom("MaxUnpooling2D", RegisterMaxUnpooling2D()); - AddCustom("Convolution2DTransposeBias", RegisterConvolution2DTransposeBias()); + AddCustom(kMaxPoolingWithArgmax2DOpName, RegisterMaxPoolingWithArgmax2D(), + kMaxPoolingWithArgmax2DOpVersion); + AddCustom(kMaxUnpooling2DOpName, RegisterMaxUnpooling2D(), + kMaxUnpooling2DOpVersion); + AddCustom(kConvolution2DTransposeBiasOpName, + RegisterConvolution2DTransposeBias(), + kConvolution2DTransposeBiasOpVersion); } } // namespace mediapipe From d063ed2c1ec20f2b5ce6820bf6c62b5b0b6faf94 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 6 Jun 2023 13:50:01 -0700 Subject: [PATCH 027/106] Rename MPPFaceLandmarker.m to MPPFaceLandmarker.mm PiperOrigin-RevId: 538281740 --- mediapipe/tasks/ios/vision/face_landmarker/BUILD | 2 +- .../sources/{MPPFaceLandmarker.m => MPPFaceLandmarker.mm} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename mediapipe/tasks/ios/vision/face_landmarker/sources/{MPPFaceLandmarker.m => MPPFaceLandmarker.mm} (100%) diff --git a/mediapipe/tasks/ios/vision/face_landmarker/BUILD b/mediapipe/tasks/ios/vision/face_landmarker/BUILD index 14f82237a..8e62ee216 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/BUILD +++ b/mediapipe/tasks/ios/vision/face_landmarker/BUILD @@ -45,7 +45,7 @@ objc_library( objc_library( name = "MPPFaceLandmarker", - srcs = ["sources/MPPFaceLandmarker.m"], + srcs = ["sources/MPPFaceLandmarker.mm"], hdrs = ["sources/MPPFaceLandmarker.h"], copts = [ "-ObjC++", diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.m b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm similarity 
index 100% rename from mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.m rename to mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm From 4a123445c4d2ca90f0cb3720e6b39dc6179f14a9 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 6 Jun 2023 15:03:56 -0700 Subject: [PATCH 028/106] Update rules_foreign_cc Fixes https://github.com/google/mediapipe/issues/4365 PiperOrigin-RevId: 538301543 --- WORKSPACE | 9 +++++---- third_party/BUILD | 8 ++++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index 9b50ff8e3..1d7ced979 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -45,12 +45,13 @@ http_archive( ) http_archive( - name = "rules_foreign_cc", - strip_prefix = "rules_foreign_cc-0.1.0", - url = "https://github.com/bazelbuild/rules_foreign_cc/archive/0.1.0.zip", + name = "rules_foreign_cc", + sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51", + strip_prefix = "rules_foreign_cc-0.9.0", + url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz", ) -load("@rules_foreign_cc//:workspace_definitions.bzl", "rules_foreign_cc_dependencies") +load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies") rules_foreign_cc_dependencies() diff --git a/third_party/BUILD b/third_party/BUILD index f6107106d..470b7ff99 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -13,7 +13,7 @@ # limitations under the License. # -load("@rules_foreign_cc//tools/build_defs:cmake.bzl", "cmake_external") +load("@rules_foreign_cc//foreign_cc:cmake.bzl", "cmake") load("@bazel_skylib//:bzl_library.bzl", "bzl_library") licenses(["notice"]) # Apache License 2.0 @@ -154,7 +154,7 @@ OPENCV_SHARED_LIBS = True OPENCV_SO_VERSION = "3.4" -cmake_external( +cmake( name = "opencv_cmake", # Values to be passed as -Dkey=value on the CMake command line; # here are serving to provide some CMake script configuration options @@ -230,7 +230,7 @@ cmake_external( "-lpthread", "-lrt", ], - shared_libraries = select({ + out_shared_libs = select({ "@bazel_tools//src/conditions:darwin": ["libopencv_%s.%s.dylib" % (module, OPENCV_SO_VERSION) for module in OPENCV_MODULES], # Only the shared objects listed here will be linked in the directory # that Bazel adds to the RUNPATH of dependent executables. You cannot @@ -238,7 +238,7 @@ cmake_external( # versioned name is the one that the executables actually reference. 
"//conditions:default": ["libopencv_%s.so.%s" % (module, OPENCV_SO_VERSION) for module in OPENCV_MODULES], }) if OPENCV_SHARED_LIBS else None, - static_libraries = [ + out_static_libs = [ "libopencv_%s.a" % module for module in OPENCV_MODULES ] if not OPENCV_SHARED_LIBS else None, From d6f34f6aefba73472d4407c05f1b5f5f379b626a Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 6 Jun 2023 15:29:21 -0700 Subject: [PATCH 029/106] Log the Bazel build PiperOrigin-RevId: 538308030 --- setup.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/setup.py b/setup.py index 0d7f29061..4eaa0dcf2 100644 --- a/setup.py +++ b/setup.py @@ -204,6 +204,7 @@ class GeneratePyProtos(build_ext.build_ext): self._protoc, '-I.', '--python_out=' + os.path.abspath(self.build_lib), source ] + print('Invoking: ', protoc_command) if subprocess.call(protoc_command) != 0: sys.exit(-1) @@ -268,6 +269,7 @@ class BuildModules(build_ext.build_ext): 'build', external_file, ] + print('Invoking: ', fetch_model_command) if subprocess.call(fetch_model_command) != 0: sys.exit(-1) _copy_to_build_lib_dir(self.build_lib, external_file) @@ -292,6 +294,8 @@ class BuildModules(build_ext.build_ext): if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') + + print('Invoking: ', bazel_command) if subprocess.call(bazel_command) != 0: sys.exit(-1) _copy_to_build_lib_dir(self.build_lib, binary_graph_target + '.binarypb') @@ -322,6 +326,7 @@ class GenerateMetadataSchema(build_ext.build_ext): bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') bazel_command.append('--copt=-DEGL_NO_X11') + print('Invoking: ', bazel_command) if subprocess.call(bazel_command) != 0: sys.exit(-1) _copy_to_build_lib_dir( @@ -392,6 +397,8 @@ class BuildExtension(build_ext.build_ext): x86_name, arm64_name, ] + + print('Invoking: ', lipo_command) if subprocess.call(lipo_command) != 0: sys.exit(-1) else: @@ -421,6 +428,8 @@ class BuildExtension(build_ext.build_ext): bazel_command += extra_args if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') + + print('Invoking: ', bazel_command) if subprocess.call(bazel_command) != 0: sys.exit(-1) ext_bazel_bin_path = os.path.join('bazel-bin', ext.relpath, From 4b0f3cacaec61436536d65c2c342a477e37ab757 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 6 Jun 2023 15:51:00 -0700 Subject: [PATCH 030/106] Internal change PiperOrigin-RevId: 538313290 --- .../python/vision/object_detector/model.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/mediapipe/model_maker/python/vision/object_detector/model.py b/mediapipe/model_maker/python/vision/object_detector/model.py index 70e63d5b5..b1b4951fd 100644 --- a/mediapipe/model_maker/python/vision/object_detector/model.py +++ b/mediapipe/model_maker/python/vision/object_detector/model.py @@ -101,14 +101,17 @@ class ObjectDetectorModel(tf.keras.Model): ) return model_config - def _build_model(self) -> tf.keras.Model: + def _build_model(self, omit_l2=False) -> tf.keras.Model: """Builds a RetinaNet object detector model.""" input_specs = tf.keras.layers.InputSpec( shape=[None] + self._model_spec.input_image_shape ) - l2_regularizer = tf.keras.regularizers.l2( - self._model_options.l2_weight_decay / 2.0 - ) + if omit_l2: + l2_regularizer = None + else: + l2_regularizer = tf.keras.regularizers.l2( + self._model_options.l2_weight_decay / 2.0 + ) model_config = self._get_model_config() return factory.build_retinanet(input_specs, model_config, l2_regularizer) @@ -167,7 
+170,7 @@ class ObjectDetectorModel(tf.keras.Model): def convert_to_qat(self) -> None: """Converts the model to a QAT RetinaNet model.""" - model = self._build_model() + model = self._build_model(omit_l2=True) dummy_input = tf.zeros([1] + self._model_spec.input_image_shape) model(dummy_input, training=True) model.set_weights(self._model.get_weights()) From 8a5b443b8656339dc574e58dd681bfe5621386c9 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 7 Jun 2023 18:44:54 +0530 Subject: [PATCH 031/106] Fixed typos in method names --- .../sources/MPPGestureRecognizerResult+ProtobufHelpers.mm | 2 +- .../utils/sources/MPPGestureRecognizerResult+Helpers.h | 2 +- .../utils/sources/MPPGestureRecognizerResult+Helpers.mm | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm index b115229c7..f628499d5 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm @@ -59,7 +59,7 @@ using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt; handednessProto:{landmarkDetectionResultProto.classifications()} handLandmarksProto:{landmarkDetectionResultProto.landmarks()} worldLandmarksProto:{landmarkDetectionResultProto.world_landmarks()} - timestampInMilliSeconds:0]; + timestampInMilliseconds:0]; } @end diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h index 0e2eede03..2fe4c9110 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h @@ -67,7 +67,7 @@ static const int kMicroSecondsPerMilliSecond = 1000; handLandmarksProto worldLandmarksProto: (const std::vector<::mediapipe::LandmarkList> &)worldLandmarksProto - timestampInMilliSeconds:(NSInteger)timestampInMilliseconds; + timestampInMilliseconds:(NSInteger)timestampInMilliseconds; @end NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm index f129da8e5..daa6d2fb1 100644 --- a/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm +++ b/mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.mm @@ -46,7 +46,7 @@ static const NSInteger kDefaultGestureIndex = -1; handLandmarksProto worldLandmarksProto: (const std::vector &)worldLandmarksProto - timestampInMilliSeconds:(NSInteger)timestampInMilliseconds { + timestampInMilliseconds:(NSInteger)timestampInMilliseconds { NSMutableArray *> *multiHandGestures = [NSMutableArray arrayWithCapacity:(NSUInteger)handGesturesProto.size()]; @@ -141,7 +141,7 @@ static const NSInteger kDefaultGestureIndex = -1; std::vector>() worldLandmarksProto:worldLandmarksPacket .Get>() - timestampInMilliSeconds:timestampInMilliseconds]; + timestampInMilliseconds:timestampInMilliseconds]; } @end From 10144a805a004febf270af64af334adb5df10b6c 
Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 7 Jun 2023 18:45:37 +0530 Subject: [PATCH 032/106] Added more tests to MPPGestureRecognizerTests.mm --- .../MPPGestureRecognizerTests.m | 454 +++++++++++++++++- 1 file changed, 441 insertions(+), 13 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 58e3bc9cf..2e38b4a9d 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -48,6 +48,9 @@ static const NSInteger kGestureExpectedIndex = -1; static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; static const float kLandmarksErrorTolerance = 0.03f; +static NSString *const kLiveStreamTestsDictGestureRecognizerKey = @"gesture_recognizer"; +static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; + #define AssertEqualErrors(error, expectedError) \ XCTAssertNotNil(error); \ XCTAssertEqualObjects(error.domain, expectedError.domain); \ @@ -72,12 +75,15 @@ static const float kLandmarksErrorTolerance = 0.03f; XCTAssertTrue(gestureRecognizerResult.landmarks.count == 0); \ XCTAssertTrue(gestureRecognizerResult.worldLandmarks.count == 0); -@interface MPPGestureRecognizerTests : XCTestCase +@interface MPPGestureRecognizerTests : XCTestCase { + NSDictionary *_liveStreamSucceedsTestDict; + NSDictionary *_outOfOrderTimestampTestDict; +} @end @implementation MPPGestureRecognizerTests -#pragma mark Results +#pragma mark Expected Results + (MPPGestureRecognizerResult *)emptyGestureRecognizerResult { return [[MPPGestureRecognizerResult alloc] initWithGestures:@[] @@ -106,6 +112,8 @@ static const float kLandmarksErrorTolerance = 0.03f; shouldRemoveZPosition:YES]; } +#pragma mark Assert Gesture Recognizer Results + - (void)assertMultiHandLandmarks:(NSArray *> *)multiHandLandmarks areApproximatelyEqualToExpectedMultiHandLandmarks: (NSArray *> *)expectedMultiHandLandmarks { @@ -175,6 +183,16 @@ static const float kLandmarksErrorTolerance = 0.03f; areApproximatelyEqualToExpectedMultiHandGestures:expectedGestureRecognizerResult.gestures]; } +- (void)assertResultsOfRecognizeImageWithFileInfo:(ResourceFileInfo *)fileInfo + usingGestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + (MPPGestureRecognizerResult *)expectedGestureRecognizerResult { + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:fileInfo usingGestureRecognizer:gestureRecognizer]; + [self assertGestureRecognizerResult:gestureRecognizerResult + isApproximatelyEqualToExpectedResult:expectedGestureRecognizerResult]; +} + #pragma mark File + (NSString *)filePathWithFileInfo:(ResourceFileInfo *)fileInfo { @@ -221,7 +239,7 @@ static const float kLandmarksErrorTolerance = 0.03f; AssertEqualErrors(error, expectedError); } -#pragma mark Assert Gesture Recognizer Results +#pragma mark Recognize Helpers - (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPGestureRecognizerTests class] @@ -254,16 +272,6 @@ static const float kLandmarksErrorTolerance = 0.03f; return gestureRecognizerResult; } -- (void)assertResultsOfRecognizeImageWithFileInfo:(ResourceFileInfo *)fileInfo - usingGestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer - approximatelyEqualsGestureRecognizerResult: - 
(MPPGestureRecognizerResult *)expectedGestureRecognizerResult { - MPPGestureRecognizerResult *gestureRecognizerResult = - [self recognizeImageWithFileInfo:fileInfo usingGestureRecognizer:gestureRecognizer]; - [self assertGestureRecognizerResult:gestureRecognizerResult - isApproximatelyEqualToExpectedResult:expectedGestureRecognizerResult]; -} - #pragma mark General Tests - (void)testRecognizeWithModelPathSucceeds { @@ -279,4 +287,424 @@ static const float kLandmarksErrorTolerance = 0.03f; thumbUpGestureRecognizerResult]]; } +- (void)testRecognizeWithEmptyResultsSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kNoHandsImage usingGestureRecognizer:gestureRecognizer]; + AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult); +} + +- (void)testRecognizeWithScoreThresholdSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kThumbUpImage usingGestureRecognizer:gestureRecognizer]; + + MPPGestureRecognizerResult *expectedGestureRecognizerResult = + [MPPGestureRecognizerTests thumbUpGestureRecognizerResult]; + + XCTAssertTrue(gestureRecognizerResult.gestures.count == 1); + AssertEqualGestures(gestureRecognizerResult.gestures[0][0], + expectedGestureRecognizerResult.gestures[0][0], 0, 0); +} + +- (void)testRecognizeWithNumHandsSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + const NSInteger numHands = 2; + gestureRecognizerOptions.numHands = numHands; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kTwoHandsImage usingGestureRecognizer:gestureRecognizer]; + + XCTAssertTrue(gestureRecognizerResult.handedness.count == numHands); +} + +- (void)testRecognizeWithRotationSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + gestureRecognizerOptions.numHands = 1; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage + orientation:UIImageOrientationRight]; + + MPPGestureRecognizerResult *gestureRecognizerResult = [gestureRecognizer recognizeImage:mppImage + error:nil]; + + XCTAssertNotNil(gestureRecognizerResult); + + XCTAssertEqual(gestureRecognizerResult.gestures.count, 1); + XCTAssertEqualObjects(gestureRecognizerResult.gestures[0][0].categoryName, + kExpectedPointingUpLabel); +} + +- (void)testRecognizeWithCannedGestureFistSucceeds { + MPPGestureRecognizerOptions 
*gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + gestureRecognizerOptions.numHands = 1; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + [self assertResultsOfRecognizeImageWithFileInfo:kFistImage + usingGestureRecognizer:gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; +} + +- (void)testRecognizeWithAllowGestureFistSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ]; + + gestureRecognizerOptions.numHands = 1; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + [self assertResultsOfRecognizeImageWithFileInfo:kFistImage + usingGestureRecognizer:gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; +} + +- (void)testRecognizeWithDenyGestureFistSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ]; + + gestureRecognizerOptions.numHands = 1; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + MPPGestureRecognizerResult *gestureRecognizerResult = + [self recognizeImageWithFileInfo:kFistImage usingGestureRecognizer:gestureRecognizer]; + AssertGestureRecognizerResultIsEmpty(gestureRecognizerResult); +} + +- (void)testRecognizeWithPreferAllowlistOverDenylistSucceeds { + MPPGestureRecognizerOptions *gestureRecognizerOptions = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + gestureRecognizerOptions.cannedGesturesClassifierOptions = [[MPPClassifierOptions alloc] init]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.scoreThreshold = 0.5f; + gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryAllowlist = @[ kFistLabel ]; + gestureRecognizerOptions.cannedGesturesClassifierOptions.categoryDenylist = @[ kFistLabel ]; + + gestureRecognizerOptions.numHands = 1; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:gestureRecognizerOptions]; + + [self assertResultsOfRecognizeImageWithFileInfo:kFistImage + usingGestureRecognizer:gestureRecognizer + approximatelyEqualsGestureRecognizerResult: + [MPPGestureRecognizerTests fistGestureRecognizerResultWithLabel:kFistLabel]]; +} + +#pragma mark Running Mode Tests + +- (void)testCreateGestureRecognizerFailsWithDelegateInNonLiveStreamMode { + MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; + for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { + MPPGestureRecognizerOptions *options = 
+ [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + options.runningMode = runningModesToTest[i]; + options.gestureRecognizerLiveStreamDelegate = self; + + [self assertCreateGestureRecognizerWithOptions:options + failsWithExpectedError: + [NSError + errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in image or video mode. The " + @"delegate must not be set in the task's options." + }]]; + } +} + +- (void)testCreateGestureRecognizerFailsWithMissingDelegateInLiveStreamMode { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + options.runningMode = MPPRunningModeLiveStream; + + [self + assertCreateGestureRecognizerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in live stream mode. An " + @"object must be set as the delegate of the task " + @"in its options to ensure asynchronous delivery " + @"of results." + }]]; +} + +- (void)testRecognizeFailsWithCallingWrongApiInImageMode { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *liveStreamApiCallError; + XCTAssertFalse([gestureRecognizer recognizeAsyncImage:image + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); + + NSError *expectedLiveStreamApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with live " + @"stream mode. Current Running Mode: Image" + }]; + + AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError); + + NSError *videoApiCallError; + XCTAssertFalse([gestureRecognizer recognizeVideoFrame:image + timestampInMilliseconds:0 + error:&videoApiCallError]); + + NSError *expectedVideoApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"video mode. Current Running Mode: Image" + }]; + AssertEqualErrors(videoApiCallError, expectedVideoApiCallError); +} + +- (void)testRecognizeFailsWithCallingWrongApiInVideoMode { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + options.runningMode = MPPRunningModeVideo; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *liveStreamApiCallError; + XCTAssertFalse([gestureRecognizer recognizeAsyncImage:image + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); + + NSError *expectedLiveStreamApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with live " + @"stream mode. 
Current Running Mode: Video" + }]; + + AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError); + + NSError *imageApiCallError; + XCTAssertFalse([gestureRecognizer recognizeImage:image error:&imageApiCallError]); + + NSError *expectedImageApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"image mode. Current Running Mode: Video" + }]; + AssertEqualErrors(imageApiCallError, expectedImageApiCallError); +} + +- (void)testRecognizeFailsWithCallingWrongApiInLiveStreamMode { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.gestureRecognizerLiveStreamDelegate = self; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *imageApiCallError; + XCTAssertFalse([gestureRecognizer recognizeImage:image error:&imageApiCallError]); + + NSError *expectedImageApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"image mode. Current Running Mode: Live Stream" + }]; + AssertEqualErrors(imageApiCallError, expectedImageApiCallError); + + NSError *videoApiCallError; + XCTAssertFalse([gestureRecognizer recognizeVideoFrame:image + timestampInMilliseconds:0 + error:&videoApiCallError]); + + NSError *expectedVideoApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"video mode. 
Current Running Mode: Live Stream" + }]; + AssertEqualErrors(videoApiCallError, expectedVideoApiCallError); +} + +- (void)testRecognizeWithVideoModeSucceeds { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + options.runningMode = MPPRunningModeVideo; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + for (int i = 0; i < 3; i++) { + MPPGestureRecognizerResult *gestureRecognizerResult = + [gestureRecognizer recognizeVideoFrame:image timestampInMilliseconds:i error:nil]; + [self assertGestureRecognizerResult:gestureRecognizerResult + isApproximatelyEqualToExpectedResult:[MPPGestureRecognizerTests + thumbUpGestureRecognizerResult]]; + } +} + +- (void)testRecognizeWithOutOfOrderTimestampsAndLiveStreamModeFails { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.gestureRecognizerLiveStreamDelegate = self; + + XCTestExpectation *expectation = [[XCTestExpectation alloc] + initWithDescription:@"recognizeWithOutOfOrderTimestampsAndLiveStream"]; + + expectation.expectedFulfillmentCount = 1; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:options]; + + _outOfOrderTimestampTestDict = @{ + kLiveStreamTestsDictGestureRecognizerKey : gestureRecognizer, + kLiveStreamTestsDictExpectationKey : expectation + }; + + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + XCTAssertTrue([gestureRecognizer recognizeAsyncImage:image timestampInMilliseconds:1 error:nil]); + + NSError *error; + XCTAssertFalse([gestureRecognizer recognizeAsyncImage:image + timestampInMilliseconds:0 + error:&error]); + + NSError *expectedError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing." + }]; + AssertEqualErrors(error, expectedError); + + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; +} + +- (void)testRecognizeWithLiveStreamModeSucceeds { + MPPGestureRecognizerOptions *options = + [self gestureRecognizerOptionsWithModelFileInfo:kGestureRecognizerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.gestureRecognizerLiveStreamDelegate = self; + + NSInteger iterationCount = 100; + + // Because of flow limiting, we cannot ensure that the callback will be + // invoked `iterationCount` times. + // A normal expectation fails if expectation.fulfill() is not called + // `expectation.expectedFulfillmentCount` times. + // If `expectation.isInverted = true`, the test only succeeds if the + // expectation is not fulfilled for the specified `expectedFulfillmentCount`. + // Since in our case we cannot predict how many times the expectation will + // be fulfilled, setting + // `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that the test succeeds if the + // expectation is fulfilled at most `iterationCount` times.
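+  // For example, with `iterationCount` = 100 the inverted expectation
+  // tolerates up to 100 fulfillments and fails the test only if the delegate
+  // fires a 101st time.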
+ XCTestExpectation *expectation = + [[XCTestExpectation alloc] initWithDescription:@"recognizeWithLiveStream"]; + + expectation.expectedFulfillmentCount = iterationCount + 1; + expectation.inverted = YES; + + MPPGestureRecognizer *gestureRecognizer = + [self createGestureRecognizerWithOptionsSucceeds:options]; + + _liveStreamSucceedsTestDict = @{ + kLiveStreamTestsDictGestureRecognizerKey : gestureRecognizer, + kLiveStreamTestsDictExpectationKey : expectation + }; + + // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used + // with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type + // `CMSampleBuffer`. + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + for (int i = 0; i < iterationCount; i++) { + XCTAssertTrue([gestureRecognizer recognizeAsyncImage:image + timestampInMilliseconds:i + error:nil]); + } + + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; +} + +- (void)gestureRecognizer:(MPPGestureRecognizer *)gestureRecognizer + didFinishRecognitionWithResult:(MPPGestureRecognizerResult *)gestureRecognizerResult + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError *)error { + [self assertGestureRecognizerResult:gestureRecognizerResult + isApproximatelyEqualToExpectedResult:[MPPGestureRecognizerTests + thumbUpGestureRecognizerResult]]; + + if (gestureRecognizer == _outOfOrderTimestampTestDict[kLiveStreamTestsDictGestureRecognizerKey]) { + [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } else if (gestureRecognizer == + _liveStreamSucceedsTestDict[kLiveStreamTestsDictGestureRecognizerKey]) { + [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } +} + @end From 489e9274101852866a593a87fc7d592d41cc89ea Mon Sep 17 00:00:00 2001 From: Ilya Tokar Date: Wed, 7 Jun 2023 11:04:52 -0700 Subject: [PATCH 033/106] Fix tests to work with arch haswell/sandybridge. PiperOrigin-RevId: 538538356 --- .../tasks/cc/text/text_embedder/text_embedder_test.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mediapipe/tasks/cc/text/text_embedder/text_embedder_test.cc b/mediapipe/tasks/cc/text/text_embedder/text_embedder_test.cc index 29a00a2ac..76634a922 100644 --- a/mediapipe/tasks/cc/text/text_embedder/text_embedder_test.cc +++ b/mediapipe/tasks/cc/text/text_embedder/text_embedder_test.cc @@ -45,7 +45,7 @@ constexpr char kUniversalSentenceEncoderModel[] = // Tolerance for embedding vector coordinate values. constexpr float kEpsilon = 1e-4; // Tolerancy for cosine similarity evaluation. 
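+// The looser tolerance below accounts for small numeric differences across
+// CPU architectures (see the __FMA__ branches added to the assertions in this
+// test): fused multiply-add computes a*b+c with a single rounding step, so
+// bit-near 1e-6 agreement of embedding values is not portable.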
-constexpr double kSimilarityTolerancy = 1e-6; +constexpr double kSimilarityTolerancy = 2e-2; using ::mediapipe::file::JoinPath; using ::testing::HasSubstr; @@ -79,6 +79,8 @@ TEST_F(EmbedderTest, SucceedsWithMobileBert) { ASSERT_EQ(result0.embeddings[0].float_embedding.size(), 512); #ifdef _WIN32 ASSERT_NEAR(result0.embeddings[0].float_embedding[0], 21.2148f, kEpsilon); +#elif defined(__FMA__) + ASSERT_NEAR(result0.embeddings[0].float_embedding[0], 21.3605f, kEpsilon); #else ASSERT_NEAR(result0.embeddings[0].float_embedding[0], 19.9016f, kEpsilon); #endif // _WIN32 @@ -87,7 +89,11 @@ TEST_F(EmbedderTest, SucceedsWithMobileBert) { auto result1, text_embedder->Embed("what a great and fantastic trip")); ASSERT_EQ(result1.embeddings.size(), 1); ASSERT_EQ(result1.embeddings[0].float_embedding.size(), 512); +#ifdef __FMA__ + ASSERT_NEAR(result1.embeddings[0].float_embedding[0], 21.254150f, kEpsilon); +#else ASSERT_NEAR(result1.embeddings[0].float_embedding[0], 22.626251f, kEpsilon); +#endif // Check cosine similarity. MP_ASSERT_OK_AND_ASSIGN( From a7cd7b9a321371737f0bfd27e86800b110acf369 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 7 Jun 2023 11:09:27 -0700 Subject: [PATCH 034/106] Add CommonJS bundle for MediaPipe Tasks Fixes https://github.com/google/mediapipe/issues/4398 PiperOrigin-RevId: 538539711 --- mediapipe/tasks/web/audio/BUILD | 19 ++++++++++++++++++- mediapipe/tasks/web/package.json | 4 +++- mediapipe/tasks/web/text/BUILD | 19 ++++++++++++++++++- mediapipe/tasks/web/vision/BUILD | 19 ++++++++++++++++++- 4 files changed, 57 insertions(+), 4 deletions(-) diff --git a/mediapipe/tasks/web/audio/BUILD b/mediapipe/tasks/web/audio/BUILD index 409836800..4dd5a2f6b 100644 --- a/mediapipe/tasks/web/audio/BUILD +++ b/mediapipe/tasks/web/audio/BUILD @@ -43,7 +43,23 @@ rollup_bundle( entry_point = "index.ts", format = "esm", output_dir = False, - sourcemap = "false", + sourcemap = "true", + deps = [ + ":audio_lib", + "@npm//@rollup/plugin-commonjs", + "@npm//@rollup/plugin-node-resolve", + "@npm//@rollup/plugin-terser", + "@npm//google-protobuf", + ], +) + +rollup_bundle( + name = "audio_bundle_cjs", + config_file = "//mediapipe/tasks/web:rollup.config.mjs", + entry_point = "index.ts", + format = "cjs", + output_dir = False, + sourcemap = "true", deps = [ ":audio_lib", "@npm//@rollup/plugin-commonjs", @@ -76,6 +92,7 @@ pkg_npm( "wasm/audio_wasm_nosimd_internal.js", "wasm/audio_wasm_nosimd_internal.wasm", ":audio_bundle", + ":audio_bundle_cjs", ":package_json", ], ) diff --git a/mediapipe/tasks/web/package.json b/mediapipe/tasks/web/package.json index 89c9a599e..3f495d151 100644 --- a/mediapipe/tasks/web/package.json +++ b/mediapipe/tasks/web/package.json @@ -2,7 +2,9 @@ "name": "@mediapipe/tasks-__NAME__", "version": "__VERSION__", "description": "__DESCRIPTION__", - "main": "__NAME___bundle.js", + "main": "__NAME___bundle_cjs.js", + "browser": "__NAME___bundle.js", + "module": "__NAME___bundle.js", "author": "mediapipe@google.com", "license": "Apache-2.0", "types": "__TYPES__", diff --git a/mediapipe/tasks/web/text/BUILD b/mediapipe/tasks/web/text/BUILD index 08cbb8672..f68a8c9f5 100644 --- a/mediapipe/tasks/web/text/BUILD +++ b/mediapipe/tasks/web/text/BUILD @@ -44,7 +44,23 @@ rollup_bundle( entry_point = "index.ts", format = "esm", output_dir = False, - sourcemap = "false", + sourcemap = "true", + deps = [ + ":text_lib", + "@npm//@rollup/plugin-commonjs", + "@npm//@rollup/plugin-node-resolve", + "@npm//@rollup/plugin-terser", + "@npm//google-protobuf", + ], +) + +rollup_bundle( 
+ name = "text_bundle_cjs", + config_file = "//mediapipe/tasks/web:rollup.config.mjs", + entry_point = "index.ts", + format = "cjs", + output_dir = False, + sourcemap = "true", deps = [ ":text_lib", "@npm//@rollup/plugin-commonjs", @@ -78,5 +94,6 @@ pkg_npm( "wasm/text_wasm_nosimd_internal.wasm", ":package_json", ":text_bundle", + ":text_bundle_cjs", ], ) diff --git a/mediapipe/tasks/web/vision/BUILD b/mediapipe/tasks/web/vision/BUILD index 10e98de8b..a7767fe53 100644 --- a/mediapipe/tasks/web/vision/BUILD +++ b/mediapipe/tasks/web/vision/BUILD @@ -55,7 +55,23 @@ rollup_bundle( entry_point = "index.ts", format = "esm", output_dir = False, - sourcemap = "false", + sourcemap = "true", + deps = [ + ":vision_lib", + "@npm//@rollup/plugin-commonjs", + "@npm//@rollup/plugin-node-resolve", + "@npm//@rollup/plugin-terser", + "@npm//google-protobuf", + ], +) + +rollup_bundle( + name = "vision_bundle_cjs", + config_file = "//mediapipe/tasks/web:rollup.config.mjs", + entry_point = "index.ts", + format = "cjs", + output_dir = False, + sourcemap = "true", deps = [ ":vision_lib", "@npm//@rollup/plugin-commonjs", @@ -89,5 +105,6 @@ pkg_npm( "wasm/vision_wasm_nosimd_internal.wasm", ":package_json", ":vision_bundle", + ":vision_bundle_cjs", ], ) From 943445fba84ec55d1833d769e1dc73f806f6444b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 7 Jun 2023 20:01:56 -0700 Subject: [PATCH 035/106] Update base audio/vision tasks api to suuport proto3 graph options. PiperOrigin-RevId: 538661975 --- mediapipe/tasks/cc/audio/core/BUILD | 1 + mediapipe/tasks/cc/audio/core/audio_task_api_factory.h | 10 +++------- mediapipe/tasks/cc/core/task_api_factory.h | 1 - mediapipe/tasks/cc/vision/core/BUILD | 2 ++ .../tasks/cc/vision/core/vision_task_api_factory.h | 10 +++------- 5 files changed, 9 insertions(+), 15 deletions(-) diff --git a/mediapipe/tasks/cc/audio/core/BUILD b/mediapipe/tasks/cc/audio/core/BUILD index 8372b1aa2..4f821f6d5 100644 --- a/mediapipe/tasks/cc/audio/core/BUILD +++ b/mediapipe/tasks/cc/audio/core/BUILD @@ -43,6 +43,7 @@ cc_library( ":base_audio_task_api", "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/tasks/cc/core:task_api_factory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", diff --git a/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h b/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h index bdac1cad0..901419a57 100644 --- a/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h +++ b/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h @@ -27,6 +27,7 @@ limitations under the License. 
#include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/tasks/cc/audio/core/base_audio_task_api.h" +#include "mediapipe/tasks/cc/core/task_api_factory.h" #include "tensorflow/lite/core/api/op_resolver.h" namespace mediapipe { @@ -60,13 +61,8 @@ class AudioTaskApiFactory { "Task graph config should only contain one task subgraph node.", MediaPipeTasksStatus::kInvalidTaskGraphConfigError); } else { - if (!node.options().HasExtension(Options::ext)) { - return CreateStatusWithPayload( - absl::StatusCode::kInvalidArgument, - absl::StrCat(node.calculator(), - " is missing the required task options field."), - MediaPipeTasksStatus::kInvalidTaskGraphConfigError); - } + MP_RETURN_IF_ERROR( + tasks::core::TaskApiFactory::CheckHasValidOptions(node)); found_task_subgraph = true; } } diff --git a/mediapipe/tasks/cc/core/task_api_factory.h b/mediapipe/tasks/cc/core/task_api_factory.h index 6c55a6567..6f604dd4c 100644 --- a/mediapipe/tasks/cc/core/task_api_factory.h +++ b/mediapipe/tasks/cc/core/task_api_factory.h @@ -81,7 +81,6 @@ class TaskApiFactory { return std::make_unique(std::move(runner)); } - private: template static absl::Status CheckHasValidOptions( const CalculatorGraphConfig::Node& node) { diff --git a/mediapipe/tasks/cc/vision/core/BUILD b/mediapipe/tasks/cc/vision/core/BUILD index 59fb45622..6bcf2f5d6 100644 --- a/mediapipe/tasks/cc/vision/core/BUILD +++ b/mediapipe/tasks/cc/vision/core/BUILD @@ -43,6 +43,7 @@ cc_library( "//mediapipe/framework/formats:rect_cc_proto", "//mediapipe/tasks/cc/components/containers:rect", "//mediapipe/tasks/cc/core:base_task_api", + "//mediapipe/tasks/cc/core:task_api_factory", "//mediapipe/tasks/cc/core:task_runner", "//mediapipe/tasks/cc/vision/utils:image_tensor_specs", "@com_google_absl//absl/status", @@ -58,6 +59,7 @@ cc_library( ":base_vision_task_api", "//mediapipe/calculators/core:flow_limiter_calculator", "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/tasks/cc/core:task_api_factory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", diff --git a/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h b/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h index c68e432c3..48fc33848 100644 --- a/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h +++ b/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h @@ -26,6 +26,7 @@ limitations under the License. 
#include "absl/status/statusor.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/tasks/cc/core/task_api_factory.h" #include "mediapipe/tasks/cc/vision/core/base_vision_task_api.h" #include "tensorflow/lite/core/api/op_resolver.h" @@ -60,13 +61,8 @@ class VisionTaskApiFactory { "Task graph config should only contain one task subgraph node.", MediaPipeTasksStatus::kInvalidTaskGraphConfigError); } else { - if (!node.options().HasExtension(Options::ext)) { - return CreateStatusWithPayload( - absl::StatusCode::kInvalidArgument, - absl::StrCat(node.calculator(), - " is missing the required task options field."), - MediaPipeTasksStatus::kInvalidTaskGraphConfigError); - } + MP_RETURN_IF_ERROR( + tasks::core::TaskApiFactory::CheckHasValidOptions(node)); found_task_subgraph = true; } } From f63c00b3c62a95fa47d5bfcfc5104c373fd0605b Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 8 Jun 2023 18:09:42 +0530 Subject: [PATCH 036/106] Added hand landmarker implementation file and hand landmarker connections --- .../tasks/ios/vision/hand_landmarker/BUILD | 16 + .../sources/MPPHandLandmarker.mm | 288 ++++++++++++++++++ .../sources/MPPHandLandmarksConnections.h | 58 ++++ 3 files changed, 362 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm create mode 100644 mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD index 6c815f28d..d081ab96c 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/ios/vision/hand_landmarker/BUILD @@ -38,8 +38,15 @@ objc_library( ], ) +objc_library( + name = "MPPHandLandmarksConnections", + hdrs = ["sources/MPPHandLandmarksConnections.h"], + module_name = "MPPHandLandmarksConnections", +) + objc_library( name = "MPPHandLandmarker", + srcs = ["sources/MPPHandLandmarker.mm"], hdrs = ["sources/MPPHandLandmarker.h"], copts = [ "-ObjC++", @@ -48,10 +55,19 @@ objc_library( ], module_name = "MPPHandLandmarker", deps = [ + ":MPPHandLandmarksConnections", ":MPPHandLandmarkerOptions", ":MPPHandLandmarkerResult", "//mediapipe/tasks/ios/components/containers:MPPConnection", "//mediapipe/tasks/ios/vision/core:MPPImage", + "//mediapipe/tasks/cc/vision/hand_landmarker:hand_landmarker_graph", + "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + "//mediapipe/tasks/ios/core:MPPTaskInfo", + "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator", + "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner", + "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerOptionsHelpers", + "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerResultHelpers", ], ) diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm new file mode 100644 index 000000000..143b9bd23 --- /dev/null +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm @@ -0,0 +1,288 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h" + +#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h" +#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" +#import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h" +#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h" +#import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerOptions+Helpers.h" +#import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h" + +namespace { +using ::mediapipe::NormalizedRect; +using ::mediapipe::Packet; +using ::mediapipe::Timestamp; +using ::mediapipe::tasks::core::PacketMap; +using ::mediapipe::tasks::core::PacketsCallback; +} // namespace + +static NSString *const kImageTag = @"IMAGE"; +static NSString *const kImageInStreamName = @"image_in"; +static NSString *const kNormRectTag = @"NORM_RECT"; +static NSString *const kNormRectInStreamName = @"norm_rect_in"; +static NSString *const kImageOutStreamName = @"image_out"; +static NSString *const kLandmarksTag = @"LANDMARKS"; +static NSString *const kLandmarksOutStreamName = @"hand_landmarks"; +static NSString *const kWorldLandmarksTag = @"WORLD_LANDMARKS"; +static NSString *const kWorldLandmarksOutStreamName = @"world_hand_landmarks"; +static NSString *const kHandednessTag = @"HANDEDNESS"; +static NSString *const kHandednessOutStreamName = @"handedness"; +static NSString *const kTaskGraphName = + @"mediapipe.tasks.vision.hand_landmarker.HandLandmarkerGraph"; +static NSString *const kTaskName = @"handLandmarker"; + +#define InputPacketMap(imagePacket, normalizedRectPacket) \ + { \ + {kImageInStreamName.cppString, imagePacket}, { \ + kNormRectInStreamName.cppString, normalizedRectPacket \ + } \ + } + +@interface MPPHandLandmarker () { + /** iOS Vision Task Runner */ + MPPVisionTaskRunner *_visionTaskRunner; + dispatch_queue_t _callbackQueue; +} +@property(nonatomic, weak) id handLandmarkerLiveStreamDelegate; +@end + +@implementation MPPHandLandmarker + +- (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOutputPacketMap: + (PacketMap &)outputPacketMap { + return [MPPHandLandmarkerResult + handLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString] + worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName + .cppString] + handednessPacket:outputPacketMap[kHandednessOutStreamName.cppString]]; +} + +- (void)processLiveStreamResult:(absl::StatusOr)liveStreamResult { + if (![self.handLandmarkerLiveStreamDelegate + respondsToSelector:@selector(handLandmarker: + didFinishDetectionWithResult:timestampInMilliseconds:error:)]) { + return; + } + + NSError *callbackError = nil; + if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) { + dispatch_async(_callbackQueue, ^{ + [self.handLandmarkerLiveStreamDelegate handLandmarker:self + didFinishDetectionWithResult:nil 
+ timestampInMilliseconds:Timestamp::Unset().Value() + error:callbackError]; + }); + return; + } + + PacketMap &outputPacketMap = liveStreamResult.value(); + if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) { + return; + } + + MPPHandLandmarkerResult *result = [self handLandmarkerResultWithOutputPacketMap:outputPacketMap]; + + NSInteger timeStampInMilliseconds = + outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / + kMicroSecondsPerMilliSecond; + dispatch_async(_callbackQueue, ^{ + [self.handLandmarkerLiveStreamDelegate handLandmarker:self + didFinishDetectionWithResult:result + timestampInMilliseconds:timeStampInMilliseconds + error:callbackError]; + }); +} + +- (instancetype)initWithOptions:(MPPHandLandmarkerOptions *)options error:(NSError **)error { + self = [super init]; + if (self) { + MPPTaskInfo *taskInfo = [[MPPTaskInfo alloc] + initWithTaskGraphName:kTaskGraphName + inputStreams:@[ + [NSString stringWithFormat:@"%@:%@", kImageTag, kImageInStreamName], + [NSString stringWithFormat:@"%@:%@", kNormRectTag, kNormRectInStreamName] + ] + outputStreams:@[ + [NSString stringWithFormat:@"%@:%@", kLandmarksTag, kLandmarksOutStreamName], + [NSString + stringWithFormat:@"%@:%@", kWorldLandmarksTag, kWorldLandmarksOutStreamName], + [NSString stringWithFormat:@"%@:%@", kHandednessTag, kHandednessOutStreamName], + [NSString stringWithFormat:@"%@:%@", kImageTag, kImageOutStreamName] + ] + taskOptions:options + enableFlowLimiting:options.runningMode == MPPRunningModeLiveStream + error:error]; + + if (!taskInfo) { + return nil; + } + + PacketsCallback packetsCallback = nullptr; + + if (options.handLandmarkerLiveStreamDelegate) { + _handLandmarkerLiveStreamDelegate = options.handLandmarkerLiveStreamDelegate; + + // Create a private serial dispatch queue on which the delegate method will be called + // asynchronously. This ensures that if the client performs a long-running operation in + // the delegate method, the queue on which the C++ callbacks are invoked is not blocked + // and is freed up to continue with its operations. + _callbackQueue = dispatch_queue_create( + [MPPVisionTaskRunner uniqueDispatchQueueNameWithSuffix:kTaskName], NULL); + + // Capture `self` weakly to avoid keeping `self` in memory + // and causing a retain cycle after `self` is set to `nil`.
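+      // If `self` has already been deallocated when a packet arrives, `weakSelf`
+      // is nil and the Objective-C message send inside the block is a safe no-op.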
+ MPPHandLandmarker *__weak weakSelf = self; + packetsCallback = [=](absl::StatusOr liveStreamResult) { + [weakSelf processLiveStreamResult:liveStreamResult]; + }; + } + + _visionTaskRunner = + [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig] + runningMode:options.runningMode + packetsCallback:std::move(packetsCallback) + error:error]; + if (!_visionTaskRunner) { + return nil; + } + } + return self; +} + +- (instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error { + MPPHandLandmarkerOptions *options = [[MPPHandLandmarkerOptions alloc] init]; + + options.baseOptions.modelAssetPath = modelPath; + + return [self initWithOptions:options error:error]; +} + +- (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOptionalOutputPacketMap: + (std::optional &)outputPacketMap { + if (!outputPacketMap.has_value()) { + return nil; + } + MPPHandLandmarkerResult *result = + [self handLandmarkerResultWithOutputPacketMap:outputPacketMap.value()]; + return result; +} + +- (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error { + std::optional rect = + [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation + imageSize:CGSizeMake(image.width, image.height) + error:error]; + if (!rect.has_value()) { + return nil; + } + + Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error]; + if (imagePacket.IsEmpty()) { + return nil; + } + + Packet normalizedRectPacket = + [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()]; + + PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); + + std::optional outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap + error:error]; + return [self handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap]; +} + +- (std::optional)inputPacketMapWithMPPImage:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + std::optional rect = + [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation + imageSize:CGSizeMake(image.width, image.height) + error:error]; + if (!rect.has_value()) { + return std::nullopt; + } + + Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image + timestampInMilliseconds:timestampInMilliseconds + error:error]; + if (imagePacket.IsEmpty()) { + return std::nullopt; + } + + Packet normalizedRectPacket = + [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value() + timestampInMilliseconds:timestampInMilliseconds]; + + PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); + return inputPacketMap; +} + +- (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image + timestampInMilliseconds:timestampInMilliseconds + error:error]; + if (!inputPacketMap.has_value()) { + return nil; + } + + std::optional outputPacketMap = + [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error]; + + return [self handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap]; +} + +- (BOOL)detectAsyncInImage:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image + timestampInMilliseconds:timestampInMilliseconds + error:error]; + if 
(!inputPacketMap.has_value()) { + return NO; + } + + return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error]; +} + ++ (NSArray *)handPalmConnections { + return kHandPalmConnections; +} + ++ (NSArray *)handIndexFingerConnections { + return kHandIndexFingerConnections; +} + ++ (NSArray *)handMiddleFingerConnections { + return kHandMiddleFingerConnections; +} + ++ (NSArray *)handRingFingerConnections { + return kHandRingFingerConnections; +} + ++ (NSArray *)handPinkyConnections { + return kHandPinkyConnections; +} + ++ (NSArray *)handConnections { + return kHandConnections; +} + +@end diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h new file mode 100644 index 000000000..fe356fbee --- /dev/null +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h @@ -0,0 +1,58 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import +#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h" + +NS_ASSUME_NONNULL_BEGIN + +NSArray *const kHandPalmConnections = @[ + [[MPPConnection alloc] initWithStart:0 end:1], [[MPPConnection alloc] initWithStart:0 end:5], + [[MPPConnection alloc] initWithStart:9 end:13], [[MPPConnection alloc] initWithStart:13 end:17], + [[MPPConnection alloc] initWithStart:5 end:9], [[MPPConnection alloc] initWithStart:0 end:17] +]; + +NSArray *const kHandThumbConnections = @[ + [[MPPConnection alloc] initWithStart:1 end:2], [[MPPConnection alloc] initWithStart:2 end:3], + [[MPPConnection alloc] initWithStart:3 end:4] +]; + +NSArray *const kHandIndexFingerConnections = @[ + [[MPPConnection alloc] initWithStart:5 end:6], [[MPPConnection alloc] initWithStart:6 end:7], + [[MPPConnection alloc] initWithStart:7 end:8] +]; + +NSArray *const kHandMiddleFingerConnections = @[ + [[MPPConnection alloc] initWithStart:9 end:10], [[MPPConnection alloc] initWithStart:10 end:11], + [[MPPConnection alloc] initWithStart:11 end:12] +]; + +NSArray *const kHandRingFingerConnections = @[ + [[MPPConnection alloc] initWithStart:13 end:14], [[MPPConnection alloc] initWithStart:14 end:15], + [[MPPConnection alloc] initWithStart:15 end:16] +]; + +NSArray *const kHandPinkyConnections = @[ + [[MPPConnection alloc] initWithStart:16 end:17], [[MPPConnection alloc] initWithStart:17 end:18], + [[MPPConnection alloc] initWithStart:18 end:19] +]; + +NSArray *const kHandConnections = [[[[[[NSArray + arrayWithArray:kHandPalmConnections] arrayByAddingObjectsFromArray:kHandThumbConnections] + arrayByAddingObjectsFromArray:kHandIndexFingerConnections] + arrayByAddingObjectsFromArray:kHandMiddleFingerConnections] + arrayByAddingObjectsFromArray:kHandRingFingerConnections] + arrayByAddingObjectsFromArray:kHandPinkyConnections]; + +NS_ASSUME_NONNULL_END From f528fa5de20a6eb8b43e7d2bd8d668ebf23b456f Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 9 Jun 2023 
From f528fa5de20a6eb8b43e7d2bd8d668ebf23b456f Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 9 Jun 2023 17:30:23 +0530
Subject: [PATCH 037/106] Updated constant names in MPPHandLandmarksConnections

---
 .../hand_landmarker/sources/MPPHandLandmarker.mm | 12 ++++++------
 .../sources/MPPHandLandmarksConnections.h        | 24 ++++++++++++------------
 2 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm
index 143b9bd23..46a6d1f33 100644
--- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.mm
@@ -262,27 +262,27 @@ static NSString *const kTaskName = @"handLandmarker";
 }
 
 + (NSArray *)handPalmConnections {
-  return kHandPalmConnections;
+  return MPPHandPalmConnections;
 }
 
 + (NSArray *)handIndexFingerConnections {
-  return kHandIndexFingerConnections;
+  return MPPHandIndexFingerConnections;
 }
 
 + (NSArray *)handMiddleFingerConnections {
-  return kHandMiddleFingerConnections;
+  return MPPHandMiddleFingerConnections;
 }
 
 + (NSArray *)handRingFingerConnections {
-  return kHandRingFingerConnections;
+  return MPPHandRingFingerConnections;
 }
 
 + (NSArray *)handPinkyConnections {
-  return kHandPinkyConnections;
+  return MPPHandPinkyConnections;
 }
 
 + (NSArray *)handConnections {
-  return kHandConnections;
+  return MPPHandConnections;
 }
 
 @end
diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h
index fe356fbee..7027c4fff 100644
--- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h
@@ -17,38 +17,38 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-NSArray *const kHandPalmConnections = @[
+NSArray *const MPPHandPalmConnections = @[
   [[MPPConnection alloc] initWithStart:0 end:1], [[MPPConnection alloc] initWithStart:0 end:5],
   [[MPPConnection alloc] initWithStart:9 end:13], [[MPPConnection alloc] initWithStart:13 end:17],
   [[MPPConnection alloc] initWithStart:5 end:9], [[MPPConnection alloc] initWithStart:0 end:17]
 ];
 
-NSArray *const kHandThumbConnections = @[
+NSArray *const MPPHandThumbConnections = @[
   [[MPPConnection alloc] initWithStart:1 end:2], [[MPPConnection alloc] initWithStart:2 end:3],
   [[MPPConnection alloc] initWithStart:3 end:4]
 ];
 
-NSArray *const kHandIndexFingerConnections = @[
+NSArray *const MPPHandIndexFingerConnections = @[
   [[MPPConnection alloc] initWithStart:5 end:6], [[MPPConnection alloc] initWithStart:6 end:7],
   [[MPPConnection alloc] initWithStart:7 end:8]
 ];
 
-NSArray *const kHandMiddleFingerConnections = @[
+NSArray *const MPPHandMiddleFingerConnections = @[
   [[MPPConnection alloc] initWithStart:9 end:10], [[MPPConnection alloc] initWithStart:10 end:11],
   [[MPPConnection alloc] initWithStart:11 end:12]
 ];
 
-NSArray *const kHandRingFingerConnections = @[
+NSArray *const MPPHandRingFingerConnections = @[
   [[MPPConnection alloc] initWithStart:13 end:14], [[MPPConnection alloc] initWithStart:14 end:15],
   [[MPPConnection alloc] initWithStart:15 end:16]
 ];
 
-NSArray *const kHandPinkyConnections = @[
+NSArray *const MPPHandPinkyConnections = @[
   [[MPPConnection alloc] initWithStart:17 end:18], [[MPPConnection alloc] initWithStart:18 end:19],
   [[MPPConnection alloc] initWithStart:19 end:20]
 ];
 
-NSArray *const kHandConnections = [[[[[[NSArray
-    arrayWithArray:kHandPalmConnections] arrayByAddingObjectsFromArray:kHandThumbConnections]
-    arrayByAddingObjectsFromArray:kHandIndexFingerConnections]
-    arrayByAddingObjectsFromArray:kHandMiddleFingerConnections]
-    arrayByAddingObjectsFromArray:kHandRingFingerConnections]
-    arrayByAddingObjectsFromArray:kHandPinkyConnections];
+NSArray *const MPPHandConnections = [[[[[[NSArray
+    arrayWithArray:MPPHandPalmConnections] arrayByAddingObjectsFromArray:MPPHandThumbConnections]
+    arrayByAddingObjectsFromArray:MPPHandIndexFingerConnections]
+    arrayByAddingObjectsFromArray:MPPHandMiddleFingerConnections]
+    arrayByAddingObjectsFromArray:MPPHandRingFingerConnections]
+    arrayByAddingObjectsFromArray:MPPHandPinkyConnections];
 
 NS_ASSUME_NONNULL_END

From 67c5d8d224933f7a898fb6f5684921ae049bd6d3 Mon Sep 17 00:00:00 2001
From: Sebastian Schmidt
Date: Fri, 9 Jun 2023 13:13:16 -0700
Subject: [PATCH 038/106] Add FaceLandmarker constants for iOS

PiperOrigin-RevId: 539160195
---
 mediapipe/tasks/ios/vision/face_landmarker/BUILD |    9 +
 .../face_landmarker/sources/MPPFaceLandmarker.h  |   78 +
 .../face_landmarker/sources/MPPFaceLandmarker.mm |   45 +
 .../sources/MPPFaceLandmarksConnections.h        | 2755 +++++++++++++++++
 4 files changed, 2887 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h

diff --git a/mediapipe/tasks/ios/vision/face_landmarker/BUILD b/mediapipe/tasks/ios/vision/face_landmarker/BUILD
index 8e62ee216..24a656b98 100644
--- a/mediapipe/tasks/ios/vision/face_landmarker/BUILD
+++ b/mediapipe/tasks/ios/vision/face_landmarker/BUILD
@@ -43,6 +43,13 @@ objc_library(
     ],
 )
 
+objc_library(
+    name = "MPPFaceLandmarksConnections",
+    hdrs = ["sources/MPPFaceLandmarksConnections.h"],
+    module_name = "MPPFaceLandmarksConnections",
+    deps = ["//mediapipe/tasks/ios/components/containers:MPPConnection"],
+)
+
 objc_library(
     name = "MPPFaceLandmarker",
     srcs = ["sources/MPPFaceLandmarker.mm"],
@@ -55,9 +62,11 @@ objc_library(
     deps = [
         ":MPPFaceLandmarkerOptions",
         ":MPPFaceLandmarkerResult",
+        ":MPPFaceLandmarksConnections",
         "//mediapipe/tasks/cc/vision/face_landmarker:face_landmarker_graph",
         "//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
         "//mediapipe/tasks/ios/common/utils:NSStringHelpers",
+        "//mediapipe/tasks/ios/components/containers:MPPConnection",
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h
index fc8bed06f..02bb84ac2 100644
--- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h
+++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h
@@ -14,6 +14,7 @@
 
 #import <Foundation/Foundation.h>
+#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h"
@@ -147,6 +148,83 @@ NS_SWIFT_NAME(FaceLandmarker)
                 error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
 
+/**
+ * Returns the connections between all the landmarks in the lips.
+ *
+ * @return An array of connections between all the landmarks in the lips.
+ */
++ (NSArray *)lipsConnections;
+
+/**
+ * Returns the connections between all the landmarks in the left eye.
+ *
+ * @return An array of connections between all the landmarks in the left eye.
+ */
++ (NSArray *)leftEyeConnections;
+
+/**
+ * Returns the connections between all the landmarks in the left eyebrow.
+ *
+ * @return An array of connections between all the landmarks in the left eyebrow.
+ */
++ (NSArray *)leftEyebrowConnections;
+
+/**
+ * Returns the connections between all the landmarks in the left iris.
+ *
+ * @return An array of connections between all the landmarks in the left iris.
+ */
++ (NSArray *)leftIrisConnections;
+
+/**
+ * Returns the connections between all the landmarks in the right eye.
+ *
+ * @return An array of connections between all the landmarks in the right eye.
+ */
++ (NSArray *)rightEyeConnections;
+
+/**
+ * Returns the connections between all the landmarks in the right eyebrow.
+ *
+ * @return An array of connections between all the landmarks in the right eyebrow.
+ */
++ (NSArray *)rightEyebrowConnections;
+
+/**
+ * Returns the connections between all the landmarks in the right iris.
+ *
+ * @return An array of connections between all the landmarks in the right iris.
+ */
++ (NSArray *)rightIrisConnections;
+
+/**
+ * Returns the connections between all the landmarks of the face oval.
+ *
+ * @return An array of connections between all the landmarks of the face oval.
+ */
++ (NSArray *)faceOvalConnections;
+
+/**
+ * Returns the connections between all the landmarks making up the contours of the face.
+ *
+ * @return An array of connections between all the landmarks making up the contours of the face.
+ */
++ (NSArray *)contoursConnections;
+
+/**
+ * Returns the connections between all the landmarks making up the tesselation of the face.
+ *
+ * @return An array of connections between all the landmarks making up the tesselation of the face.
+ */
++ (NSArray *)tesselationConnections;
+
+/**
+ * Returns the connections between all the landmarks in the face.
+ *
+ * @return An array of connections between all the landmarks in the face.
+ */
++ (NSArray *)faceConnections;
+
 - (instancetype)init NS_UNAVAILABLE;
 
 + (instancetype)new NS_UNAVAILABLE;
diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm
index 33dda7c0e..eec0d66ee 100644
--- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm
+++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.mm
@@ -20,6 +20,7 @@
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h"
 
@@ -277,4 +278,48 @@ static NSString *const kTaskName = @"faceLandmarker";
   });
 }
 
++ (NSArray *)lipsConnections {
+  return MPPFaceLandmarksLips;
+}
+
++ (NSArray *)leftEyeConnections {
+  return MPPFaceLandmarksLeftEye;
+}
+
++ (NSArray *)leftEyebrowConnections {
+  return MPPFaceLandmarksLeftEyebrow;
+}
+
++ (NSArray *)leftIrisConnections {
+  return MPPFaceLandmarksLeftIris;
+}
+
++ (NSArray *)rightEyeConnections {
+  return MPPFaceLandmarksRightEye;
+}
+
++ (NSArray *)rightEyebrowConnections {
+  return MPPFaceLandmarksRightEyebrow;
+}
+
++ (NSArray *)rightIrisConnections {
+  return MPPFaceLandmarksRightIris;
+}
+
++ (NSArray *)faceOvalConnections {
+  return MPPFaceLandmarksFaceOval;
+}
+
++ (NSArray *)contoursConnections {
+  return MPPFaceLandmarksContours;
+}
+
++ (NSArray *)tesselationConnections {
+  return MPPFaceLandmarksTesselation;
+}
+
++ (NSArray *)faceConnections {
+  return MPPFaceConnections;
+}
+
 @end
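Each accessor above simply republishes one of the constant arrays defined in MPPFaceLandmarksConnections.h (added below), so callers can work with a named subset instead of the full face mesh. A small sketch of one such use (not part of the patch; it again assumes MPPConnection exposes its endpoints as start/end integer index properties):

    // Sketch: collect the distinct landmark indices referenced by the contours,
    // e.g. to highlight only those points of the 478-point face mesh.
    NSMutableIndexSet *contourIndices = [NSMutableIndexSet indexSet];
    for (MPPConnection *connection in [MPPFaceLandmarker contoursConnections]) {
      [contourIndices addIndex:connection.start];
      [contourIndices addIndex:connection.end];
    }

Because the aggregates are built with arrayByAddingObjectsFromArray:, an index that appears in several regions is simply visited more than once; the index set deduplicates it.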
diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h
new file mode 100644
index 000000000..9e694476e
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h
@@ -0,0 +1,2755 @@
+
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#import "mediapipe/tasks/ios/components/containers/sources/MPPConnection.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Landmarks for lips
+NSArray *const MPPFaceLandmarksLips = @[
+  [[MPPConnection alloc] initWithStart:61 end:146],
+  [[MPPConnection alloc] initWithStart:146 end:91],
+  [[MPPConnection alloc] initWithStart:91 end:181],
+  [[MPPConnection alloc] initWithStart:181 end:84],
+  [[MPPConnection alloc] initWithStart:84 end:17],
+  [[MPPConnection alloc] initWithStart:17 end:314],
+  [[MPPConnection alloc] initWithStart:314 end:405],
+  [[MPPConnection alloc] initWithStart:405 end:321],
+  [[MPPConnection alloc] initWithStart:321 end:375],
+  [[MPPConnection alloc] initWithStart:375 end:291],
+  [[MPPConnection alloc] initWithStart:61 end:185],
+  [[MPPConnection alloc] initWithStart:185 end:40],
+  [[MPPConnection alloc] initWithStart:40 end:39],
+  [[MPPConnection alloc] initWithStart:39 end:37],
+  [[MPPConnection alloc] initWithStart:37 end:0],
+  [[MPPConnection alloc] initWithStart:0 end:267],
+  [[MPPConnection alloc] initWithStart:267 end:269],
+  [[MPPConnection alloc] initWithStart:269 end:270],
+  [[MPPConnection alloc] initWithStart:270 end:409],
+  [[MPPConnection alloc] initWithStart:409 end:291],
+  [[MPPConnection alloc] initWithStart:78 end:95],
+  [[MPPConnection alloc] initWithStart:95 end:88],
+  [[MPPConnection alloc] initWithStart:88 end:178],
+  [[MPPConnection alloc] initWithStart:178 end:87],
+  [[MPPConnection alloc] initWithStart:87 end:14],
+  [[MPPConnection alloc] initWithStart:14 end:317],
+  [[MPPConnection alloc] initWithStart:317 end:402],
+  [[MPPConnection alloc] initWithStart:402 end:318],
+  [[MPPConnection alloc] initWithStart:318 end:324],
+  [[MPPConnection alloc] initWithStart:324 end:308],
+  [[MPPConnection alloc] initWithStart:78 end:191],
+  [[MPPConnection alloc] initWithStart:191 end:80],
+  [[MPPConnection alloc] initWithStart:80 end:81],
+  [[MPPConnection alloc] initWithStart:81 end:82],
+  [[MPPConnection alloc] initWithStart:82 end:13],
+  [[MPPConnection alloc] initWithStart:13 end:312],
+  [[MPPConnection alloc] initWithStart:312 end:311],
+  [[MPPConnection alloc] initWithStart:311 end:310],
+  [[MPPConnection alloc] initWithStart:310 end:415],
+  [[MPPConnection alloc] initWithStart:415 end:308]
+];
+
+// Landmarks for left eye
+NSArray *const MPPFaceLandmarksLeftEye = @[
+  [[MPPConnection alloc] initWithStart:263 end:249],
+  [[MPPConnection alloc] initWithStart:249 end:390],
+  [[MPPConnection alloc] initWithStart:390 end:373],
+  [[MPPConnection alloc] initWithStart:373 end:374],
+  [[MPPConnection alloc] initWithStart:374 end:380],
+  [[MPPConnection alloc] initWithStart:380 end:381],
+  [[MPPConnection alloc] initWithStart:381 end:382],
+  [[MPPConnection alloc] initWithStart:382 end:362],
+
[[MPPConnection alloc] initWithStart:263 end:466],
+  [[MPPConnection alloc] initWithStart:466 end:388],
+  [[MPPConnection alloc] initWithStart:388 end:387],
+  [[MPPConnection alloc] initWithStart:387 end:386],
+  [[MPPConnection alloc] initWithStart:386 end:385],
+  [[MPPConnection alloc] initWithStart:385 end:384],
+  [[MPPConnection alloc] initWithStart:384 end:398],
+  [[MPPConnection alloc] initWithStart:398 end:362]
+];
+
+// Landmarks for left eyebrow
+NSArray *const MPPFaceLandmarksLeftEyebrow = @[
+  [[MPPConnection alloc] initWithStart:276 end:283],
+  [[MPPConnection alloc] initWithStart:283 end:282],
+  [[MPPConnection alloc] initWithStart:282 end:295],
+  [[MPPConnection alloc] initWithStart:295 end:285],
+  [[MPPConnection alloc] initWithStart:300 end:293],
+  [[MPPConnection alloc] initWithStart:293 end:334],
+  [[MPPConnection alloc] initWithStart:334 end:296],
+  [[MPPConnection alloc] initWithStart:296 end:336]
+];
+
+// Landmarks for left iris
+NSArray *const MPPFaceLandmarksLeftIris = @[
+  [[MPPConnection alloc] initWithStart:474 end:475],
+  [[MPPConnection alloc] initWithStart:475 end:476],
+  [[MPPConnection alloc] initWithStart:476 end:477],
+  [[MPPConnection alloc] initWithStart:477 end:474]
+];
+
+// Landmarks for right eye
+NSArray *const MPPFaceLandmarksRightEye = @[
+  [[MPPConnection alloc] initWithStart:33 end:7], [[MPPConnection alloc] initWithStart:7 end:163],
+  [[MPPConnection alloc] initWithStart:163 end:144],
+  [[MPPConnection alloc] initWithStart:144 end:145],
+  [[MPPConnection alloc] initWithStart:145 end:153],
+  [[MPPConnection alloc] initWithStart:153 end:154],
+  [[MPPConnection alloc] initWithStart:154 end:155],
+  [[MPPConnection alloc] initWithStart:155 end:133],
+  [[MPPConnection alloc] initWithStart:33 end:246],
+  [[MPPConnection alloc] initWithStart:246 end:161],
+  [[MPPConnection alloc] initWithStart:161 end:160],
+  [[MPPConnection alloc] initWithStart:160 end:159],
+  [[MPPConnection alloc] initWithStart:159 end:158],
+  [[MPPConnection alloc] initWithStart:158 end:157],
+  [[MPPConnection alloc] initWithStart:157 end:173],
+  [[MPPConnection alloc] initWithStart:173 end:133]
+];
+
+// Landmarks for right eyebrow
+NSArray *const MPPFaceLandmarksRightEyebrow = @[
+  [[MPPConnection alloc] initWithStart:46 end:53], [[MPPConnection alloc] initWithStart:53 end:52],
+  [[MPPConnection alloc] initWithStart:52 end:65], [[MPPConnection alloc] initWithStart:65 end:55],
+  [[MPPConnection alloc] initWithStart:70 end:63], [[MPPConnection alloc] initWithStart:63 end:105],
+  [[MPPConnection alloc] initWithStart:105 end:66], [[MPPConnection alloc] initWithStart:66 end:107]
+];
+
+// Landmarks for right iris
+NSArray *const MPPFaceLandmarksRightIris = @[
+  [[MPPConnection alloc] initWithStart:469 end:470],
+  [[MPPConnection alloc] initWithStart:470 end:471],
+  [[MPPConnection alloc] initWithStart:471 end:472],
+  [[MPPConnection alloc] initWithStart:472 end:469]
+];
+
+// Landmarks for face oval
+NSArray *const MPPFaceLandmarksFaceOval = @[
+  [[MPPConnection alloc] initWithStart:10 end:338],
+  [[MPPConnection alloc] initWithStart:338 end:297],
+  [[MPPConnection alloc] initWithStart:297 end:332],
+  [[MPPConnection alloc] initWithStart:332 end:284],
+  [[MPPConnection alloc] initWithStart:284 end:251],
+  [[MPPConnection alloc] initWithStart:251 end:389],
+  [[MPPConnection alloc] initWithStart:389 end:356],
+  [[MPPConnection alloc] initWithStart:356 end:454],
+  [[MPPConnection alloc] initWithStart:454 end:323],
+  [[MPPConnection alloc] initWithStart:323 end:361],
+  [[MPPConnection
alloc] initWithStart:361 end:288], + [[MPPConnection alloc] initWithStart:288 end:397], + [[MPPConnection alloc] initWithStart:397 end:365], + [[MPPConnection alloc] initWithStart:365 end:379], + [[MPPConnection alloc] initWithStart:379 end:378], + [[MPPConnection alloc] initWithStart:378 end:400], + [[MPPConnection alloc] initWithStart:400 end:377], + [[MPPConnection alloc] initWithStart:377 end:152], + [[MPPConnection alloc] initWithStart:152 end:148], + [[MPPConnection alloc] initWithStart:148 end:176], + [[MPPConnection alloc] initWithStart:176 end:149], + [[MPPConnection alloc] initWithStart:149 end:150], + [[MPPConnection alloc] initWithStart:150 end:136], + [[MPPConnection alloc] initWithStart:136 end:172], + [[MPPConnection alloc] initWithStart:172 end:58], + [[MPPConnection alloc] initWithStart:58 end:132], + [[MPPConnection alloc] initWithStart:132 end:93], + [[MPPConnection alloc] initWithStart:93 end:234], + [[MPPConnection alloc] initWithStart:234 end:127], + [[MPPConnection alloc] initWithStart:127 end:162], + [[MPPConnection alloc] initWithStart:162 end:21], + [[MPPConnection alloc] initWithStart:21 end:54], + [[MPPConnection alloc] initWithStart:54 end:103], + [[MPPConnection alloc] initWithStart:103 end:67], + [[MPPConnection alloc] initWithStart:67 end:109], + [[MPPConnection alloc] initWithStart:109 end:10] +]; + +// Landmarks for contours +NSArray *const MPPFaceLandmarksContours = [[[[[[[[NSArray + arrayWithArray:MPPFaceLandmarksLips] arrayByAddingObjectsFromArray:MPPFaceLandmarksLeftEye] + arrayByAddingObjectsFromArray:MPPFaceLandmarksLeftEyebrow] + arrayByAddingObjectsFromArray:MPPFaceLandmarksLeftIris] + arrayByAddingObjectsFromArray:MPPFaceLandmarksRightEye] + arrayByAddingObjectsFromArray:MPPFaceLandmarksRightEyebrow] + arrayByAddingObjectsFromArray:MPPFaceLandmarksRightIris] + arrayByAddingObjectsFromArray:MPPFaceLandmarksFaceOval]; + +// Landmarks for face tesselation +NSArray *const MPPFaceLandmarksTesselation = @[ + [[MPPConnection alloc] initWithStart:127 end:34], + [[MPPConnection alloc] initWithStart:34 end:139], + [[MPPConnection alloc] initWithStart:139 end:127], + [[MPPConnection alloc] initWithStart:11 end:0], + [[MPPConnection alloc] initWithStart:0 end:37], + [[MPPConnection alloc] initWithStart:37 end:11], + [[MPPConnection alloc] initWithStart:232 end:231], + [[MPPConnection alloc] initWithStart:231 end:120], + [[MPPConnection alloc] initWithStart:120 end:232], + [[MPPConnection alloc] initWithStart:72 end:37], + [[MPPConnection alloc] initWithStart:37 end:39], + [[MPPConnection alloc] initWithStart:39 end:72], + [[MPPConnection alloc] initWithStart:128 end:121], + [[MPPConnection alloc] initWithStart:121 end:47], + [[MPPConnection alloc] initWithStart:47 end:128], + [[MPPConnection alloc] initWithStart:232 end:121], + [[MPPConnection alloc] initWithStart:121 end:128], + [[MPPConnection alloc] initWithStart:128 end:232], + [[MPPConnection alloc] initWithStart:104 end:69], + [[MPPConnection alloc] initWithStart:69 end:67], + [[MPPConnection alloc] initWithStart:67 end:104], + [[MPPConnection alloc] initWithStart:175 end:171], + [[MPPConnection alloc] initWithStart:171 end:148], + [[MPPConnection alloc] initWithStart:148 end:175], + [[MPPConnection alloc] initWithStart:118 end:50], + [[MPPConnection alloc] initWithStart:50 end:101], + [[MPPConnection alloc] initWithStart:101 end:118], + [[MPPConnection alloc] initWithStart:73 end:39], + [[MPPConnection alloc] initWithStart:39 end:40], + [[MPPConnection alloc] initWithStart:40 end:73], + [[MPPConnection 
alloc] initWithStart:9 end:151], + [[MPPConnection alloc] initWithStart:151 end:108], + [[MPPConnection alloc] initWithStart:108 end:9], + [[MPPConnection alloc] initWithStart:48 end:115], + [[MPPConnection alloc] initWithStart:115 end:131], + [[MPPConnection alloc] initWithStart:131 end:48], + [[MPPConnection alloc] initWithStart:194 end:204], + [[MPPConnection alloc] initWithStart:204 end:211], + [[MPPConnection alloc] initWithStart:211 end:194], + [[MPPConnection alloc] initWithStart:74 end:40], + [[MPPConnection alloc] initWithStart:40 end:185], + [[MPPConnection alloc] initWithStart:185 end:74], + [[MPPConnection alloc] initWithStart:80 end:42], + [[MPPConnection alloc] initWithStart:42 end:183], + [[MPPConnection alloc] initWithStart:183 end:80], + [[MPPConnection alloc] initWithStart:40 end:92], + [[MPPConnection alloc] initWithStart:92 end:186], + [[MPPConnection alloc] initWithStart:186 end:40], + [[MPPConnection alloc] initWithStart:230 end:229], + [[MPPConnection alloc] initWithStart:229 end:118], + [[MPPConnection alloc] initWithStart:118 end:230], + [[MPPConnection alloc] initWithStart:202 end:212], + [[MPPConnection alloc] initWithStart:212 end:214], + [[MPPConnection alloc] initWithStart:214 end:202], + [[MPPConnection alloc] initWithStart:83 end:18], + [[MPPConnection alloc] initWithStart:18 end:17], + [[MPPConnection alloc] initWithStart:17 end:83], + [[MPPConnection alloc] initWithStart:76 end:61], + [[MPPConnection alloc] initWithStart:61 end:146], + [[MPPConnection alloc] initWithStart:146 end:76], + [[MPPConnection alloc] initWithStart:160 end:29], + [[MPPConnection alloc] initWithStart:29 end:30], + [[MPPConnection alloc] initWithStart:30 end:160], + [[MPPConnection alloc] initWithStart:56 end:157], + [[MPPConnection alloc] initWithStart:157 end:173], + [[MPPConnection alloc] initWithStart:173 end:56], + [[MPPConnection alloc] initWithStart:106 end:204], + [[MPPConnection alloc] initWithStart:204 end:194], + [[MPPConnection alloc] initWithStart:194 end:106], + [[MPPConnection alloc] initWithStart:135 end:214], + [[MPPConnection alloc] initWithStart:214 end:192], + [[MPPConnection alloc] initWithStart:192 end:135], + [[MPPConnection alloc] initWithStart:203 end:165], + [[MPPConnection alloc] initWithStart:165 end:98], + [[MPPConnection alloc] initWithStart:98 end:203], + [[MPPConnection alloc] initWithStart:21 end:71], + [[MPPConnection alloc] initWithStart:71 end:68], + [[MPPConnection alloc] initWithStart:68 end:21], + [[MPPConnection alloc] initWithStart:51 end:45], + [[MPPConnection alloc] initWithStart:45 end:4], + [[MPPConnection alloc] initWithStart:4 end:51], + [[MPPConnection alloc] initWithStart:144 end:24], + [[MPPConnection alloc] initWithStart:24 end:23], + [[MPPConnection alloc] initWithStart:23 end:144], + [[MPPConnection alloc] initWithStart:77 end:146], + [[MPPConnection alloc] initWithStart:146 end:91], + [[MPPConnection alloc] initWithStart:91 end:77], + [[MPPConnection alloc] initWithStart:205 end:50], + [[MPPConnection alloc] initWithStart:50 end:187], + [[MPPConnection alloc] initWithStart:187 end:205], + [[MPPConnection alloc] initWithStart:201 end:200], + [[MPPConnection alloc] initWithStart:200 end:18], + [[MPPConnection alloc] initWithStart:18 end:201], + [[MPPConnection alloc] initWithStart:91 end:106], + [[MPPConnection alloc] initWithStart:106 end:182], + [[MPPConnection alloc] initWithStart:182 end:91], + [[MPPConnection alloc] initWithStart:90 end:91], + [[MPPConnection alloc] initWithStart:91 end:181], + [[MPPConnection alloc] 
initWithStart:181 end:90], + [[MPPConnection alloc] initWithStart:85 end:84], + [[MPPConnection alloc] initWithStart:84 end:17], + [[MPPConnection alloc] initWithStart:17 end:85], + [[MPPConnection alloc] initWithStart:206 end:203], + [[MPPConnection alloc] initWithStart:203 end:36], + [[MPPConnection alloc] initWithStart:36 end:206], + [[MPPConnection alloc] initWithStart:148 end:171], + [[MPPConnection alloc] initWithStart:171 end:140], + [[MPPConnection alloc] initWithStart:140 end:148], + [[MPPConnection alloc] initWithStart:92 end:40], + [[MPPConnection alloc] initWithStart:40 end:39], + [[MPPConnection alloc] initWithStart:39 end:92], + [[MPPConnection alloc] initWithStart:193 end:189], + [[MPPConnection alloc] initWithStart:189 end:244], + [[MPPConnection alloc] initWithStart:244 end:193], + [[MPPConnection alloc] initWithStart:159 end:158], + [[MPPConnection alloc] initWithStart:158 end:28], + [[MPPConnection alloc] initWithStart:28 end:159], + [[MPPConnection alloc] initWithStart:247 end:246], + [[MPPConnection alloc] initWithStart:246 end:161], + [[MPPConnection alloc] initWithStart:161 end:247], + [[MPPConnection alloc] initWithStart:236 end:3], + [[MPPConnection alloc] initWithStart:3 end:196], + [[MPPConnection alloc] initWithStart:196 end:236], + [[MPPConnection alloc] initWithStart:54 end:68], + [[MPPConnection alloc] initWithStart:68 end:104], + [[MPPConnection alloc] initWithStart:104 end:54], + [[MPPConnection alloc] initWithStart:193 end:168], + [[MPPConnection alloc] initWithStart:168 end:8], + [[MPPConnection alloc] initWithStart:8 end:193], + [[MPPConnection alloc] initWithStart:117 end:228], + [[MPPConnection alloc] initWithStart:228 end:31], + [[MPPConnection alloc] initWithStart:31 end:117], + [[MPPConnection alloc] initWithStart:189 end:193], + [[MPPConnection alloc] initWithStart:193 end:55], + [[MPPConnection alloc] initWithStart:55 end:189], + [[MPPConnection alloc] initWithStart:98 end:97], + [[MPPConnection alloc] initWithStart:97 end:99], + [[MPPConnection alloc] initWithStart:99 end:98], + [[MPPConnection alloc] initWithStart:126 end:47], + [[MPPConnection alloc] initWithStart:47 end:100], + [[MPPConnection alloc] initWithStart:100 end:126], + [[MPPConnection alloc] initWithStart:166 end:79], + [[MPPConnection alloc] initWithStart:79 end:218], + [[MPPConnection alloc] initWithStart:218 end:166], + [[MPPConnection alloc] initWithStart:155 end:154], + [[MPPConnection alloc] initWithStart:154 end:26], + [[MPPConnection alloc] initWithStart:26 end:155], + [[MPPConnection alloc] initWithStart:209 end:49], + [[MPPConnection alloc] initWithStart:49 end:131], + [[MPPConnection alloc] initWithStart:131 end:209], + [[MPPConnection alloc] initWithStart:135 end:136], + [[MPPConnection alloc] initWithStart:136 end:150], + [[MPPConnection alloc] initWithStart:150 end:135], + [[MPPConnection alloc] initWithStart:47 end:126], + [[MPPConnection alloc] initWithStart:126 end:217], + [[MPPConnection alloc] initWithStart:217 end:47], + [[MPPConnection alloc] initWithStart:223 end:52], + [[MPPConnection alloc] initWithStart:52 end:53], + [[MPPConnection alloc] initWithStart:53 end:223], + [[MPPConnection alloc] initWithStart:45 end:51], + [[MPPConnection alloc] initWithStart:51 end:134], + [[MPPConnection alloc] initWithStart:134 end:45], + [[MPPConnection alloc] initWithStart:211 end:170], + [[MPPConnection alloc] initWithStart:170 end:140], + [[MPPConnection alloc] initWithStart:140 end:211], + [[MPPConnection alloc] initWithStart:67 end:69], + [[MPPConnection alloc] 
initWithStart:69 end:108], + [[MPPConnection alloc] initWithStart:108 end:67], + [[MPPConnection alloc] initWithStart:43 end:106], + [[MPPConnection alloc] initWithStart:106 end:91], + [[MPPConnection alloc] initWithStart:91 end:43], + [[MPPConnection alloc] initWithStart:230 end:119], + [[MPPConnection alloc] initWithStart:119 end:120], + [[MPPConnection alloc] initWithStart:120 end:230], + [[MPPConnection alloc] initWithStart:226 end:130], + [[MPPConnection alloc] initWithStart:130 end:247], + [[MPPConnection alloc] initWithStart:247 end:226], + [[MPPConnection alloc] initWithStart:63 end:53], + [[MPPConnection alloc] initWithStart:53 end:52], + [[MPPConnection alloc] initWithStart:52 end:63], + [[MPPConnection alloc] initWithStart:238 end:20], + [[MPPConnection alloc] initWithStart:20 end:242], + [[MPPConnection alloc] initWithStart:242 end:238], + [[MPPConnection alloc] initWithStart:46 end:70], + [[MPPConnection alloc] initWithStart:70 end:156], + [[MPPConnection alloc] initWithStart:156 end:46], + [[MPPConnection alloc] initWithStart:78 end:62], + [[MPPConnection alloc] initWithStart:62 end:96], + [[MPPConnection alloc] initWithStart:96 end:78], + [[MPPConnection alloc] initWithStart:46 end:53], + [[MPPConnection alloc] initWithStart:53 end:63], + [[MPPConnection alloc] initWithStart:63 end:46], + [[MPPConnection alloc] initWithStart:143 end:34], + [[MPPConnection alloc] initWithStart:34 end:227], + [[MPPConnection alloc] initWithStart:227 end:143], + [[MPPConnection alloc] initWithStart:123 end:117], + [[MPPConnection alloc] initWithStart:117 end:111], + [[MPPConnection alloc] initWithStart:111 end:123], + [[MPPConnection alloc] initWithStart:44 end:125], + [[MPPConnection alloc] initWithStart:125 end:19], + [[MPPConnection alloc] initWithStart:19 end:44], + [[MPPConnection alloc] initWithStart:236 end:134], + [[MPPConnection alloc] initWithStart:134 end:51], + [[MPPConnection alloc] initWithStart:51 end:236], + [[MPPConnection alloc] initWithStart:216 end:206], + [[MPPConnection alloc] initWithStart:206 end:205], + [[MPPConnection alloc] initWithStart:205 end:216], + [[MPPConnection alloc] initWithStart:154 end:153], + [[MPPConnection alloc] initWithStart:153 end:22], + [[MPPConnection alloc] initWithStart:22 end:154], + [[MPPConnection alloc] initWithStart:39 end:37], + [[MPPConnection alloc] initWithStart:37 end:167], + [[MPPConnection alloc] initWithStart:167 end:39], + [[MPPConnection alloc] initWithStart:200 end:201], + [[MPPConnection alloc] initWithStart:201 end:208], + [[MPPConnection alloc] initWithStart:208 end:200], + [[MPPConnection alloc] initWithStart:36 end:142], + [[MPPConnection alloc] initWithStart:142 end:100], + [[MPPConnection alloc] initWithStart:100 end:36], + [[MPPConnection alloc] initWithStart:57 end:212], + [[MPPConnection alloc] initWithStart:212 end:202], + [[MPPConnection alloc] initWithStart:202 end:57], + [[MPPConnection alloc] initWithStart:20 end:60], + [[MPPConnection alloc] initWithStart:60 end:99], + [[MPPConnection alloc] initWithStart:99 end:20], + [[MPPConnection alloc] initWithStart:28 end:158], + [[MPPConnection alloc] initWithStart:158 end:157], + [[MPPConnection alloc] initWithStart:157 end:28], + [[MPPConnection alloc] initWithStart:35 end:226], + [[MPPConnection alloc] initWithStart:226 end:113], + [[MPPConnection alloc] initWithStart:113 end:35], + [[MPPConnection alloc] initWithStart:160 end:159], + [[MPPConnection alloc] initWithStart:159 end:27], + [[MPPConnection alloc] initWithStart:27 end:160], + [[MPPConnection alloc] 
initWithStart:204 end:202], + [[MPPConnection alloc] initWithStart:202 end:210], + [[MPPConnection alloc] initWithStart:210 end:204], + [[MPPConnection alloc] initWithStart:113 end:225], + [[MPPConnection alloc] initWithStart:225 end:46], + [[MPPConnection alloc] initWithStart:46 end:113], + [[MPPConnection alloc] initWithStart:43 end:202], + [[MPPConnection alloc] initWithStart:202 end:204], + [[MPPConnection alloc] initWithStart:204 end:43], + [[MPPConnection alloc] initWithStart:62 end:76], + [[MPPConnection alloc] initWithStart:76 end:77], + [[MPPConnection alloc] initWithStart:77 end:62], + [[MPPConnection alloc] initWithStart:137 end:123], + [[MPPConnection alloc] initWithStart:123 end:116], + [[MPPConnection alloc] initWithStart:116 end:137], + [[MPPConnection alloc] initWithStart:41 end:38], + [[MPPConnection alloc] initWithStart:38 end:72], + [[MPPConnection alloc] initWithStart:72 end:41], + [[MPPConnection alloc] initWithStart:203 end:129], + [[MPPConnection alloc] initWithStart:129 end:142], + [[MPPConnection alloc] initWithStart:142 end:203], + [[MPPConnection alloc] initWithStart:64 end:98], + [[MPPConnection alloc] initWithStart:98 end:240], + [[MPPConnection alloc] initWithStart:240 end:64], + [[MPPConnection alloc] initWithStart:49 end:102], + [[MPPConnection alloc] initWithStart:102 end:64], + [[MPPConnection alloc] initWithStart:64 end:49], + [[MPPConnection alloc] initWithStart:41 end:73], + [[MPPConnection alloc] initWithStart:73 end:74], + [[MPPConnection alloc] initWithStart:74 end:41], + [[MPPConnection alloc] initWithStart:212 end:216], + [[MPPConnection alloc] initWithStart:216 end:207], + [[MPPConnection alloc] initWithStart:207 end:212], + [[MPPConnection alloc] initWithStart:42 end:74], + [[MPPConnection alloc] initWithStart:74 end:184], + [[MPPConnection alloc] initWithStart:184 end:42], + [[MPPConnection alloc] initWithStart:169 end:170], + [[MPPConnection alloc] initWithStart:170 end:211], + [[MPPConnection alloc] initWithStart:211 end:169], + [[MPPConnection alloc] initWithStart:170 end:149], + [[MPPConnection alloc] initWithStart:149 end:176], + [[MPPConnection alloc] initWithStart:176 end:170], + [[MPPConnection alloc] initWithStart:105 end:66], + [[MPPConnection alloc] initWithStart:66 end:69], + [[MPPConnection alloc] initWithStart:69 end:105], + [[MPPConnection alloc] initWithStart:122 end:6], + [[MPPConnection alloc] initWithStart:6 end:168], + [[MPPConnection alloc] initWithStart:168 end:122], + [[MPPConnection alloc] initWithStart:123 end:147], + [[MPPConnection alloc] initWithStart:147 end:187], + [[MPPConnection alloc] initWithStart:187 end:123], + [[MPPConnection alloc] initWithStart:96 end:77], + [[MPPConnection alloc] initWithStart:77 end:90], + [[MPPConnection alloc] initWithStart:90 end:96], + [[MPPConnection alloc] initWithStart:65 end:55], + [[MPPConnection alloc] initWithStart:55 end:107], + [[MPPConnection alloc] initWithStart:107 end:65], + [[MPPConnection alloc] initWithStart:89 end:90], + [[MPPConnection alloc] initWithStart:90 end:180], + [[MPPConnection alloc] initWithStart:180 end:89], + [[MPPConnection alloc] initWithStart:101 end:100], + [[MPPConnection alloc] initWithStart:100 end:120], + [[MPPConnection alloc] initWithStart:120 end:101], + [[MPPConnection alloc] initWithStart:63 end:105], + [[MPPConnection alloc] initWithStart:105 end:104], + [[MPPConnection alloc] initWithStart:104 end:63], + [[MPPConnection alloc] initWithStart:93 end:137], + [[MPPConnection alloc] initWithStart:137 end:227], + [[MPPConnection alloc] 
initWithStart:227 end:93], + [[MPPConnection alloc] initWithStart:15 end:86], + [[MPPConnection alloc] initWithStart:86 end:85], + [[MPPConnection alloc] initWithStart:85 end:15], + [[MPPConnection alloc] initWithStart:129 end:102], + [[MPPConnection alloc] initWithStart:102 end:49], + [[MPPConnection alloc] initWithStart:49 end:129], + [[MPPConnection alloc] initWithStart:14 end:87], + [[MPPConnection alloc] initWithStart:87 end:86], + [[MPPConnection alloc] initWithStart:86 end:14], + [[MPPConnection alloc] initWithStart:55 end:8], + [[MPPConnection alloc] initWithStart:8 end:9], + [[MPPConnection alloc] initWithStart:9 end:55], + [[MPPConnection alloc] initWithStart:100 end:47], + [[MPPConnection alloc] initWithStart:47 end:121], + [[MPPConnection alloc] initWithStart:121 end:100], + [[MPPConnection alloc] initWithStart:145 end:23], + [[MPPConnection alloc] initWithStart:23 end:22], + [[MPPConnection alloc] initWithStart:22 end:145], + [[MPPConnection alloc] initWithStart:88 end:89], + [[MPPConnection alloc] initWithStart:89 end:179], + [[MPPConnection alloc] initWithStart:179 end:88], + [[MPPConnection alloc] initWithStart:6 end:122], + [[MPPConnection alloc] initWithStart:122 end:196], + [[MPPConnection alloc] initWithStart:196 end:6], + [[MPPConnection alloc] initWithStart:88 end:95], + [[MPPConnection alloc] initWithStart:95 end:96], + [[MPPConnection alloc] initWithStart:96 end:88], + [[MPPConnection alloc] initWithStart:138 end:172], + [[MPPConnection alloc] initWithStart:172 end:136], + [[MPPConnection alloc] initWithStart:136 end:138], + [[MPPConnection alloc] initWithStart:215 end:58], + [[MPPConnection alloc] initWithStart:58 end:172], + [[MPPConnection alloc] initWithStart:172 end:215], + [[MPPConnection alloc] initWithStart:115 end:48], + [[MPPConnection alloc] initWithStart:48 end:219], + [[MPPConnection alloc] initWithStart:219 end:115], + [[MPPConnection alloc] initWithStart:42 end:80], + [[MPPConnection alloc] initWithStart:80 end:81], + [[MPPConnection alloc] initWithStart:81 end:42], + [[MPPConnection alloc] initWithStart:195 end:3], + [[MPPConnection alloc] initWithStart:3 end:51], + [[MPPConnection alloc] initWithStart:51 end:195], + [[MPPConnection alloc] initWithStart:43 end:146], + [[MPPConnection alloc] initWithStart:146 end:61], + [[MPPConnection alloc] initWithStart:61 end:43], + [[MPPConnection alloc] initWithStart:171 end:175], + [[MPPConnection alloc] initWithStart:175 end:199], + [[MPPConnection alloc] initWithStart:199 end:171], + [[MPPConnection alloc] initWithStart:81 end:82], + [[MPPConnection alloc] initWithStart:82 end:38], + [[MPPConnection alloc] initWithStart:38 end:81], + [[MPPConnection alloc] initWithStart:53 end:46], + [[MPPConnection alloc] initWithStart:46 end:225], + [[MPPConnection alloc] initWithStart:225 end:53], + [[MPPConnection alloc] initWithStart:144 end:163], + [[MPPConnection alloc] initWithStart:163 end:110], + [[MPPConnection alloc] initWithStart:110 end:144], + [[MPPConnection alloc] initWithStart:52 end:65], + [[MPPConnection alloc] initWithStart:65 end:66], + [[MPPConnection alloc] initWithStart:66 end:52], + [[MPPConnection alloc] initWithStart:229 end:228], + [[MPPConnection alloc] initWithStart:228 end:117], + [[MPPConnection alloc] initWithStart:117 end:229], + [[MPPConnection alloc] initWithStart:34 end:127], + [[MPPConnection alloc] initWithStart:127 end:234], + [[MPPConnection alloc] initWithStart:234 end:34], + [[MPPConnection alloc] initWithStart:107 end:108], + [[MPPConnection alloc] initWithStart:108 end:69], + 
[[MPPConnection alloc] initWithStart:69 end:107], + [[MPPConnection alloc] initWithStart:109 end:108], + [[MPPConnection alloc] initWithStart:108 end:151], + [[MPPConnection alloc] initWithStart:151 end:109], + [[MPPConnection alloc] initWithStart:48 end:64], + [[MPPConnection alloc] initWithStart:64 end:235], + [[MPPConnection alloc] initWithStart:235 end:48], + [[MPPConnection alloc] initWithStart:62 end:78], + [[MPPConnection alloc] initWithStart:78 end:191], + [[MPPConnection alloc] initWithStart:191 end:62], + [[MPPConnection alloc] initWithStart:129 end:209], + [[MPPConnection alloc] initWithStart:209 end:126], + [[MPPConnection alloc] initWithStart:126 end:129], + [[MPPConnection alloc] initWithStart:111 end:35], + [[MPPConnection alloc] initWithStart:35 end:143], + [[MPPConnection alloc] initWithStart:143 end:111], + [[MPPConnection alloc] initWithStart:117 end:123], + [[MPPConnection alloc] initWithStart:123 end:50], + [[MPPConnection alloc] initWithStart:50 end:117], + [[MPPConnection alloc] initWithStart:222 end:65], + [[MPPConnection alloc] initWithStart:65 end:52], + [[MPPConnection alloc] initWithStart:52 end:222], + [[MPPConnection alloc] initWithStart:19 end:125], + [[MPPConnection alloc] initWithStart:125 end:141], + [[MPPConnection alloc] initWithStart:141 end:19], + [[MPPConnection alloc] initWithStart:221 end:55], + [[MPPConnection alloc] initWithStart:55 end:65], + [[MPPConnection alloc] initWithStart:65 end:221], + [[MPPConnection alloc] initWithStart:3 end:195], + [[MPPConnection alloc] initWithStart:195 end:197], + [[MPPConnection alloc] initWithStart:197 end:3], + [[MPPConnection alloc] initWithStart:25 end:7], + [[MPPConnection alloc] initWithStart:7 end:33], + [[MPPConnection alloc] initWithStart:33 end:25], + [[MPPConnection alloc] initWithStart:220 end:237], + [[MPPConnection alloc] initWithStart:237 end:44], + [[MPPConnection alloc] initWithStart:44 end:220], + [[MPPConnection alloc] initWithStart:70 end:71], + [[MPPConnection alloc] initWithStart:71 end:139], + [[MPPConnection alloc] initWithStart:139 end:70], + [[MPPConnection alloc] initWithStart:122 end:193], + [[MPPConnection alloc] initWithStart:193 end:245], + [[MPPConnection alloc] initWithStart:245 end:122], + [[MPPConnection alloc] initWithStart:247 end:130], + [[MPPConnection alloc] initWithStart:130 end:33], + [[MPPConnection alloc] initWithStart:33 end:247], + [[MPPConnection alloc] initWithStart:71 end:21], + [[MPPConnection alloc] initWithStart:21 end:162], + [[MPPConnection alloc] initWithStart:162 end:71], + [[MPPConnection alloc] initWithStart:170 end:169], + [[MPPConnection alloc] initWithStart:169 end:150], + [[MPPConnection alloc] initWithStart:150 end:170], + [[MPPConnection alloc] initWithStart:188 end:174], + [[MPPConnection alloc] initWithStart:174 end:196], + [[MPPConnection alloc] initWithStart:196 end:188], + [[MPPConnection alloc] initWithStart:216 end:186], + [[MPPConnection alloc] initWithStart:186 end:92], + [[MPPConnection alloc] initWithStart:92 end:216], + [[MPPConnection alloc] initWithStart:2 end:97], + [[MPPConnection alloc] initWithStart:97 end:167], + [[MPPConnection alloc] initWithStart:167 end:2], + [[MPPConnection alloc] initWithStart:141 end:125], + [[MPPConnection alloc] initWithStart:125 end:241], + [[MPPConnection alloc] initWithStart:241 end:141], + [[MPPConnection alloc] initWithStart:164 end:167], + [[MPPConnection alloc] initWithStart:167 end:37], + [[MPPConnection alloc] initWithStart:37 end:164], + [[MPPConnection alloc] initWithStart:72 end:38], + 
[[MPPConnection alloc] initWithStart:38 end:12], + [[MPPConnection alloc] initWithStart:12 end:72], + [[MPPConnection alloc] initWithStart:38 end:82], + [[MPPConnection alloc] initWithStart:82 end:13], + [[MPPConnection alloc] initWithStart:13 end:38], + [[MPPConnection alloc] initWithStart:63 end:68], + [[MPPConnection alloc] initWithStart:68 end:71], + [[MPPConnection alloc] initWithStart:71 end:63], + [[MPPConnection alloc] initWithStart:226 end:35], + [[MPPConnection alloc] initWithStart:35 end:111], + [[MPPConnection alloc] initWithStart:111 end:226], + [[MPPConnection alloc] initWithStart:101 end:50], + [[MPPConnection alloc] initWithStart:50 end:205], + [[MPPConnection alloc] initWithStart:205 end:101], + [[MPPConnection alloc] initWithStart:206 end:92], + [[MPPConnection alloc] initWithStart:92 end:165], + [[MPPConnection alloc] initWithStart:165 end:206], + [[MPPConnection alloc] initWithStart:209 end:198], + [[MPPConnection alloc] initWithStart:198 end:217], + [[MPPConnection alloc] initWithStart:217 end:209], + [[MPPConnection alloc] initWithStart:165 end:167], + [[MPPConnection alloc] initWithStart:167 end:97], + [[MPPConnection alloc] initWithStart:97 end:165], + [[MPPConnection alloc] initWithStart:220 end:115], + [[MPPConnection alloc] initWithStart:115 end:218], + [[MPPConnection alloc] initWithStart:218 end:220], + [[MPPConnection alloc] initWithStart:133 end:112], + [[MPPConnection alloc] initWithStart:112 end:243], + [[MPPConnection alloc] initWithStart:243 end:133], + [[MPPConnection alloc] initWithStart:239 end:238], + [[MPPConnection alloc] initWithStart:238 end:241], + [[MPPConnection alloc] initWithStart:241 end:239], + [[MPPConnection alloc] initWithStart:214 end:135], + [[MPPConnection alloc] initWithStart:135 end:169], + [[MPPConnection alloc] initWithStart:169 end:214], + [[MPPConnection alloc] initWithStart:190 end:173], + [[MPPConnection alloc] initWithStart:173 end:133], + [[MPPConnection alloc] initWithStart:133 end:190], + [[MPPConnection alloc] initWithStart:171 end:208], + [[MPPConnection alloc] initWithStart:208 end:32], + [[MPPConnection alloc] initWithStart:32 end:171], + [[MPPConnection alloc] initWithStart:125 end:44], + [[MPPConnection alloc] initWithStart:44 end:237], + [[MPPConnection alloc] initWithStart:237 end:125], + [[MPPConnection alloc] initWithStart:86 end:87], + [[MPPConnection alloc] initWithStart:87 end:178], + [[MPPConnection alloc] initWithStart:178 end:86], + [[MPPConnection alloc] initWithStart:85 end:86], + [[MPPConnection alloc] initWithStart:86 end:179], + [[MPPConnection alloc] initWithStart:179 end:85], + [[MPPConnection alloc] initWithStart:84 end:85], + [[MPPConnection alloc] initWithStart:85 end:180], + [[MPPConnection alloc] initWithStart:180 end:84], + [[MPPConnection alloc] initWithStart:83 end:84], + [[MPPConnection alloc] initWithStart:84 end:181], + [[MPPConnection alloc] initWithStart:181 end:83], + [[MPPConnection alloc] initWithStart:201 end:83], + [[MPPConnection alloc] initWithStart:83 end:182], + [[MPPConnection alloc] initWithStart:182 end:201], + [[MPPConnection alloc] initWithStart:137 end:93], + [[MPPConnection alloc] initWithStart:93 end:132], + [[MPPConnection alloc] initWithStart:132 end:137], + [[MPPConnection alloc] initWithStart:76 end:62], + [[MPPConnection alloc] initWithStart:62 end:183], + [[MPPConnection alloc] initWithStart:183 end:76], + [[MPPConnection alloc] initWithStart:61 end:76], + [[MPPConnection alloc] initWithStart:76 end:184], + [[MPPConnection alloc] initWithStart:184 end:61], + 
[[MPPConnection alloc] initWithStart:57 end:61], + [[MPPConnection alloc] initWithStart:61 end:185], + [[MPPConnection alloc] initWithStart:185 end:57], + [[MPPConnection alloc] initWithStart:212 end:57], + [[MPPConnection alloc] initWithStart:57 end:186], + [[MPPConnection alloc] initWithStart:186 end:212], + [[MPPConnection alloc] initWithStart:214 end:207], + [[MPPConnection alloc] initWithStart:207 end:187], + [[MPPConnection alloc] initWithStart:187 end:214], + [[MPPConnection alloc] initWithStart:34 end:143], + [[MPPConnection alloc] initWithStart:143 end:156], + [[MPPConnection alloc] initWithStart:156 end:34], + [[MPPConnection alloc] initWithStart:79 end:239], + [[MPPConnection alloc] initWithStart:239 end:237], + [[MPPConnection alloc] initWithStart:237 end:79], + [[MPPConnection alloc] initWithStart:123 end:137], + [[MPPConnection alloc] initWithStart:137 end:177], + [[MPPConnection alloc] initWithStart:177 end:123], + [[MPPConnection alloc] initWithStart:44 end:1], + [[MPPConnection alloc] initWithStart:1 end:4], + [[MPPConnection alloc] initWithStart:4 end:44], + [[MPPConnection alloc] initWithStart:201 end:194], + [[MPPConnection alloc] initWithStart:194 end:32], + [[MPPConnection alloc] initWithStart:32 end:201], + [[MPPConnection alloc] initWithStart:64 end:102], + [[MPPConnection alloc] initWithStart:102 end:129], + [[MPPConnection alloc] initWithStart:129 end:64], + [[MPPConnection alloc] initWithStart:213 end:215], + [[MPPConnection alloc] initWithStart:215 end:138], + [[MPPConnection alloc] initWithStart:138 end:213], + [[MPPConnection alloc] initWithStart:59 end:166], + [[MPPConnection alloc] initWithStart:166 end:219], + [[MPPConnection alloc] initWithStart:219 end:59], + [[MPPConnection alloc] initWithStart:242 end:99], + [[MPPConnection alloc] initWithStart:99 end:97], + [[MPPConnection alloc] initWithStart:97 end:242], + [[MPPConnection alloc] initWithStart:2 end:94], + [[MPPConnection alloc] initWithStart:94 end:141], + [[MPPConnection alloc] initWithStart:141 end:2], + [[MPPConnection alloc] initWithStart:75 end:59], + [[MPPConnection alloc] initWithStart:59 end:235], + [[MPPConnection alloc] initWithStart:235 end:75], + [[MPPConnection alloc] initWithStart:24 end:110], + [[MPPConnection alloc] initWithStart:110 end:228], + [[MPPConnection alloc] initWithStart:228 end:24], + [[MPPConnection alloc] initWithStart:25 end:130], + [[MPPConnection alloc] initWithStart:130 end:226], + [[MPPConnection alloc] initWithStart:226 end:25], + [[MPPConnection alloc] initWithStart:23 end:24], + [[MPPConnection alloc] initWithStart:24 end:229], + [[MPPConnection alloc] initWithStart:229 end:23], + [[MPPConnection alloc] initWithStart:22 end:23], + [[MPPConnection alloc] initWithStart:23 end:230], + [[MPPConnection alloc] initWithStart:230 end:22], + [[MPPConnection alloc] initWithStart:26 end:22], + [[MPPConnection alloc] initWithStart:22 end:231], + [[MPPConnection alloc] initWithStart:231 end:26], + [[MPPConnection alloc] initWithStart:112 end:26], + [[MPPConnection alloc] initWithStart:26 end:232], + [[MPPConnection alloc] initWithStart:232 end:112], + [[MPPConnection alloc] initWithStart:189 end:190], + [[MPPConnection alloc] initWithStart:190 end:243], + [[MPPConnection alloc] initWithStart:243 end:189], + [[MPPConnection alloc] initWithStart:221 end:56], + [[MPPConnection alloc] initWithStart:56 end:190], + [[MPPConnection alloc] initWithStart:190 end:221], + [[MPPConnection alloc] initWithStart:28 end:56], + [[MPPConnection alloc] initWithStart:56 end:221], + 
[[MPPConnection alloc] initWithStart:221 end:28], + [[MPPConnection alloc] initWithStart:27 end:28], + [[MPPConnection alloc] initWithStart:28 end:222], + [[MPPConnection alloc] initWithStart:222 end:27], + [[MPPConnection alloc] initWithStart:29 end:27], + [[MPPConnection alloc] initWithStart:27 end:223], + [[MPPConnection alloc] initWithStart:223 end:29], + [[MPPConnection alloc] initWithStart:30 end:29], + [[MPPConnection alloc] initWithStart:29 end:224], + [[MPPConnection alloc] initWithStart:224 end:30], + [[MPPConnection alloc] initWithStart:247 end:30], + [[MPPConnection alloc] initWithStart:30 end:225], + [[MPPConnection alloc] initWithStart:225 end:247], + [[MPPConnection alloc] initWithStart:238 end:79], + [[MPPConnection alloc] initWithStart:79 end:20], + [[MPPConnection alloc] initWithStart:20 end:238], + [[MPPConnection alloc] initWithStart:166 end:59], + [[MPPConnection alloc] initWithStart:59 end:75], + [[MPPConnection alloc] initWithStart:75 end:166], + [[MPPConnection alloc] initWithStart:60 end:75], + [[MPPConnection alloc] initWithStart:75 end:240], + [[MPPConnection alloc] initWithStart:240 end:60], + [[MPPConnection alloc] initWithStart:147 end:177], + [[MPPConnection alloc] initWithStart:177 end:215], + [[MPPConnection alloc] initWithStart:215 end:147], + [[MPPConnection alloc] initWithStart:20 end:79], + [[MPPConnection alloc] initWithStart:79 end:166], + [[MPPConnection alloc] initWithStart:166 end:20], + [[MPPConnection alloc] initWithStart:187 end:147], + [[MPPConnection alloc] initWithStart:147 end:213], + [[MPPConnection alloc] initWithStart:213 end:187], + [[MPPConnection alloc] initWithStart:112 end:233], + [[MPPConnection alloc] initWithStart:233 end:244], + [[MPPConnection alloc] initWithStart:244 end:112], + [[MPPConnection alloc] initWithStart:233 end:128], + [[MPPConnection alloc] initWithStart:128 end:245], + [[MPPConnection alloc] initWithStart:245 end:233], + [[MPPConnection alloc] initWithStart:128 end:114], + [[MPPConnection alloc] initWithStart:114 end:188], + [[MPPConnection alloc] initWithStart:188 end:128], + [[MPPConnection alloc] initWithStart:114 end:217], + [[MPPConnection alloc] initWithStart:217 end:174], + [[MPPConnection alloc] initWithStart:174 end:114], + [[MPPConnection alloc] initWithStart:131 end:115], + [[MPPConnection alloc] initWithStart:115 end:220], + [[MPPConnection alloc] initWithStart:220 end:131], + [[MPPConnection alloc] initWithStart:217 end:198], + [[MPPConnection alloc] initWithStart:198 end:236], + [[MPPConnection alloc] initWithStart:236 end:217], + [[MPPConnection alloc] initWithStart:198 end:131], + [[MPPConnection alloc] initWithStart:131 end:134], + [[MPPConnection alloc] initWithStart:134 end:198], + [[MPPConnection alloc] initWithStart:177 end:132], + [[MPPConnection alloc] initWithStart:132 end:58], + [[MPPConnection alloc] initWithStart:58 end:177], + [[MPPConnection alloc] initWithStart:143 end:35], + [[MPPConnection alloc] initWithStart:35 end:124], + [[MPPConnection alloc] initWithStart:124 end:143], + [[MPPConnection alloc] initWithStart:110 end:163], + [[MPPConnection alloc] initWithStart:163 end:7], + [[MPPConnection alloc] initWithStart:7 end:110], + [[MPPConnection alloc] initWithStart:228 end:110], + [[MPPConnection alloc] initWithStart:110 end:25], + [[MPPConnection alloc] initWithStart:25 end:228], + [[MPPConnection alloc] initWithStart:356 end:389], + [[MPPConnection alloc] initWithStart:389 end:368], + [[MPPConnection alloc] initWithStart:368 end:356], + [[MPPConnection alloc] initWithStart:11 
end:302], + [[MPPConnection alloc] initWithStart:302 end:267], + [[MPPConnection alloc] initWithStart:267 end:11], + [[MPPConnection alloc] initWithStart:452 end:350], + [[MPPConnection alloc] initWithStart:350 end:349], + [[MPPConnection alloc] initWithStart:349 end:452], + [[MPPConnection alloc] initWithStart:302 end:303], + [[MPPConnection alloc] initWithStart:303 end:269], + [[MPPConnection alloc] initWithStart:269 end:302], + [[MPPConnection alloc] initWithStart:357 end:343], + [[MPPConnection alloc] initWithStart:343 end:277], + [[MPPConnection alloc] initWithStart:277 end:357], + [[MPPConnection alloc] initWithStart:452 end:453], + [[MPPConnection alloc] initWithStart:453 end:357], + [[MPPConnection alloc] initWithStart:357 end:452], + [[MPPConnection alloc] initWithStart:333 end:332], + [[MPPConnection alloc] initWithStart:332 end:297], + [[MPPConnection alloc] initWithStart:297 end:333], + [[MPPConnection alloc] initWithStart:175 end:152], + [[MPPConnection alloc] initWithStart:152 end:377], + [[MPPConnection alloc] initWithStart:377 end:175], + [[MPPConnection alloc] initWithStart:347 end:348], + [[MPPConnection alloc] initWithStart:348 end:330], + [[MPPConnection alloc] initWithStart:330 end:347], + [[MPPConnection alloc] initWithStart:303 end:304], + [[MPPConnection alloc] initWithStart:304 end:270], + [[MPPConnection alloc] initWithStart:270 end:303], + [[MPPConnection alloc] initWithStart:9 end:336], + [[MPPConnection alloc] initWithStart:336 end:337], + [[MPPConnection alloc] initWithStart:337 end:9], + [[MPPConnection alloc] initWithStart:278 end:279], + [[MPPConnection alloc] initWithStart:279 end:360], + [[MPPConnection alloc] initWithStart:360 end:278], + [[MPPConnection alloc] initWithStart:418 end:262], + [[MPPConnection alloc] initWithStart:262 end:431], + [[MPPConnection alloc] initWithStart:431 end:418], + [[MPPConnection alloc] initWithStart:304 end:408], + [[MPPConnection alloc] initWithStart:408 end:409], + [[MPPConnection alloc] initWithStart:409 end:304], + [[MPPConnection alloc] initWithStart:310 end:415], + [[MPPConnection alloc] initWithStart:415 end:407], + [[MPPConnection alloc] initWithStart:407 end:310], + [[MPPConnection alloc] initWithStart:270 end:409], + [[MPPConnection alloc] initWithStart:409 end:410], + [[MPPConnection alloc] initWithStart:410 end:270], + [[MPPConnection alloc] initWithStart:450 end:348], + [[MPPConnection alloc] initWithStart:348 end:347], + [[MPPConnection alloc] initWithStart:347 end:450], + [[MPPConnection alloc] initWithStart:422 end:430], + [[MPPConnection alloc] initWithStart:430 end:434], + [[MPPConnection alloc] initWithStart:434 end:422], + [[MPPConnection alloc] initWithStart:313 end:314], + [[MPPConnection alloc] initWithStart:314 end:17], + [[MPPConnection alloc] initWithStart:17 end:313], + [[MPPConnection alloc] initWithStart:306 end:307], + [[MPPConnection alloc] initWithStart:307 end:375], + [[MPPConnection alloc] initWithStart:375 end:306], + [[MPPConnection alloc] initWithStart:387 end:388], + [[MPPConnection alloc] initWithStart:388 end:260], + [[MPPConnection alloc] initWithStart:260 end:387], + [[MPPConnection alloc] initWithStart:286 end:414], + [[MPPConnection alloc] initWithStart:414 end:398], + [[MPPConnection alloc] initWithStart:398 end:286], + [[MPPConnection alloc] initWithStart:335 end:406], + [[MPPConnection alloc] initWithStart:406 end:418], + [[MPPConnection alloc] initWithStart:418 end:335], + [[MPPConnection alloc] initWithStart:364 end:367], + [[MPPConnection alloc] initWithStart:367 end:416], 
+ [[MPPConnection alloc] initWithStart:416 end:364],
+ [[MPPConnection alloc] initWithStart:423 end:358],
+ [[MPPConnection alloc] initWithStart:358 end:327],
+ [[MPPConnection alloc] initWithStart:327 end:423],
+ [[MPPConnection alloc] initWithStart:251 end:284],
+ [[MPPConnection alloc] initWithStart:284 end:298],
+ [[MPPConnection alloc] initWithStart:298 end:251],
+ [[MPPConnection alloc] initWithStart:281 end:5],
+ [[MPPConnection alloc] initWithStart:5 end:4],
+ [[MPPConnection alloc] initWithStart:4 end:281],
+ [[MPPConnection alloc] initWithStart:373 end:374],
+ [[MPPConnection alloc] initWithStart:374 end:253],
+ [[MPPConnection alloc] initWithStart:253 end:373],
+ [[MPPConnection alloc] initWithStart:307 end:320],
+ [[MPPConnection alloc] initWithStart:320 end:321],
+ [[MPPConnection alloc] initWithStart:321 end:307],
+ [[MPPConnection alloc] initWithStart:425 end:427],
+ [[MPPConnection alloc] initWithStart:427 end:411],
+ [[MPPConnection alloc] initWithStart:411 end:425],
+ [[MPPConnection alloc] initWithStart:421 end:313],
+ [[MPPConnection alloc] initWithStart:313 end:18],
+ [[MPPConnection alloc] initWithStart:18 end:421],
+ [[MPPConnection alloc] initWithStart:321 end:405],
+ [[MPPConnection alloc] initWithStart:405 end:406],
+ [[MPPConnection alloc] initWithStart:406 end:321],
+ [[MPPConnection alloc] initWithStart:320 end:404],
+ [[MPPConnection alloc] initWithStart:404 end:405],
+ [[MPPConnection alloc] initWithStart:405 end:320],
+ [[MPPConnection alloc] initWithStart:315 end:16],
+ [[MPPConnection alloc] initWithStart:16 end:17],
+ [[MPPConnection alloc] initWithStart:17 end:315],
+ [[MPPConnection alloc] initWithStart:426 end:425],
+ [[MPPConnection alloc] initWithStart:425 end:266],
+ [[MPPConnection alloc] initWithStart:266 end:426],
+ [[MPPConnection alloc] initWithStart:377 end:400],
+ [[MPPConnection alloc] initWithStart:400 end:369],
+ [[MPPConnection alloc] initWithStart:369 end:377],
+ [[MPPConnection alloc] initWithStart:322 end:391],
+ [[MPPConnection alloc] initWithStart:391 end:269],
+ [[MPPConnection alloc] initWithStart:269 end:322],
+ [[MPPConnection alloc] initWithStart:417 end:465],
+ [[MPPConnection alloc] initWithStart:465 end:464],
+ [[MPPConnection alloc] initWithStart:464 end:417],
+ [[MPPConnection alloc] initWithStart:386 end:257],
+ [[MPPConnection alloc] initWithStart:257 end:258],
+ [[MPPConnection alloc] initWithStart:258 end:386],
+ [[MPPConnection alloc] initWithStart:466 end:260],
+ [[MPPConnection alloc] initWithStart:260 end:388],
+ [[MPPConnection alloc] initWithStart:388 end:466],
+ [[MPPConnection alloc] initWithStart:456 end:399],
+ [[MPPConnection alloc] initWithStart:399 end:419],
+ [[MPPConnection alloc] initWithStart:419 end:456],
+ [[MPPConnection alloc] initWithStart:284 end:332],
+ [[MPPConnection alloc] initWithStart:332 end:333],
+ [[MPPConnection alloc] initWithStart:333 end:284],
+ [[MPPConnection alloc] initWithStart:417 end:285],
+ [[MPPConnection alloc] initWithStart:285 end:8],
+ [[MPPConnection alloc] initWithStart:8 end:417],
+ [[MPPConnection alloc] initWithStart:346 end:340],
+ [[MPPConnection alloc] initWithStart:340 end:261],
+ [[MPPConnection alloc] initWithStart:261 end:346],
+ [[MPPConnection alloc] initWithStart:413 end:441],
+ [[MPPConnection alloc] initWithStart:441 end:285],
+ [[MPPConnection alloc] initWithStart:285 end:413],
+ [[MPPConnection alloc] initWithStart:327 end:460],
+ [[MPPConnection alloc] initWithStart:460 end:328],
+ [[MPPConnection alloc] initWithStart:328 end:327],
+ [[MPPConnection alloc] initWithStart:355 end:371],
+ [[MPPConnection alloc] initWithStart:371 end:329],
+ [[MPPConnection alloc] initWithStart:329 end:355],
+ [[MPPConnection alloc] initWithStart:392 end:439],
+ [[MPPConnection alloc] initWithStart:439 end:438],
+ [[MPPConnection alloc] initWithStart:438 end:392],
+ [[MPPConnection alloc] initWithStart:382 end:341],
+ [[MPPConnection alloc] initWithStart:341 end:256],
+ [[MPPConnection alloc] initWithStart:256 end:382],
+ [[MPPConnection alloc] initWithStart:429 end:420],
+ [[MPPConnection alloc] initWithStart:420 end:360],
+ [[MPPConnection alloc] initWithStart:360 end:429],
+ [[MPPConnection alloc] initWithStart:364 end:394],
+ [[MPPConnection alloc] initWithStart:394 end:379],
+ [[MPPConnection alloc] initWithStart:379 end:364],
+ [[MPPConnection alloc] initWithStart:277 end:343],
+ [[MPPConnection alloc] initWithStart:343 end:437],
+ [[MPPConnection alloc] initWithStart:437 end:277],
+ [[MPPConnection alloc] initWithStart:443 end:444],
+ [[MPPConnection alloc] initWithStart:444 end:283],
+ [[MPPConnection alloc] initWithStart:283 end:443],
+ [[MPPConnection alloc] initWithStart:275 end:440],
+ [[MPPConnection alloc] initWithStart:440 end:363],
+ [[MPPConnection alloc] initWithStart:363 end:275],
+ [[MPPConnection alloc] initWithStart:431 end:262],
+ [[MPPConnection alloc] initWithStart:262 end:369],
+ [[MPPConnection alloc] initWithStart:369 end:431],
+ [[MPPConnection alloc] initWithStart:297 end:338],
+ [[MPPConnection alloc] initWithStart:338 end:337],
+ [[MPPConnection alloc] initWithStart:337 end:297],
+ [[MPPConnection alloc] initWithStart:273 end:375],
+ [[MPPConnection alloc] initWithStart:375 end:321],
+ [[MPPConnection alloc] initWithStart:321 end:273],
+ [[MPPConnection alloc] initWithStart:450 end:451],
+ [[MPPConnection alloc] initWithStart:451 end:349],
+ [[MPPConnection alloc] initWithStart:349 end:450],
+ [[MPPConnection alloc] initWithStart:446 end:342],
+ [[MPPConnection alloc] initWithStart:342 end:467],
+ [[MPPConnection alloc] initWithStart:467 end:446],
+ [[MPPConnection alloc] initWithStart:293 end:334],
+ [[MPPConnection alloc] initWithStart:334 end:282],
+ [[MPPConnection alloc] initWithStart:282 end:293],
+ [[MPPConnection alloc] initWithStart:458 end:461],
+ [[MPPConnection alloc] initWithStart:461 end:462],
+ [[MPPConnection alloc] initWithStart:462 end:458],
+ [[MPPConnection alloc] initWithStart:276 end:353],
+ [[MPPConnection alloc] initWithStart:353 end:383],
+ [[MPPConnection alloc] initWithStart:383 end:276],
+ [[MPPConnection alloc] initWithStart:308 end:324],
+ [[MPPConnection alloc] initWithStart:324 end:325],
+ [[MPPConnection alloc] initWithStart:325 end:308],
+ [[MPPConnection alloc] initWithStart:276 end:300],
+ [[MPPConnection alloc] initWithStart:300 end:293],
+ [[MPPConnection alloc] initWithStart:293 end:276],
+ [[MPPConnection alloc] initWithStart:372 end:345],
+ [[MPPConnection alloc] initWithStart:345 end:447],
+ [[MPPConnection alloc] initWithStart:447 end:372],
+ [[MPPConnection alloc] initWithStart:352 end:345],
+ [[MPPConnection alloc] initWithStart:345 end:340],
+ [[MPPConnection alloc] initWithStart:340 end:352],
+ [[MPPConnection alloc] initWithStart:274 end:1],
+ [[MPPConnection alloc] initWithStart:1 end:19],
+ [[MPPConnection alloc] initWithStart:19 end:274],
+ [[MPPConnection alloc] initWithStart:456 end:248],
+ [[MPPConnection alloc] initWithStart:248 end:281],
+ [[MPPConnection alloc] initWithStart:281 end:456],
+ [[MPPConnection alloc] initWithStart:436 end:427],
+ [[MPPConnection alloc] initWithStart:427 end:425],
+ [[MPPConnection alloc] initWithStart:425 end:436],
+ [[MPPConnection alloc] initWithStart:381 end:256],
+ [[MPPConnection alloc] initWithStart:256 end:252],
+ [[MPPConnection alloc] initWithStart:252 end:381],
+ [[MPPConnection alloc] initWithStart:269 end:391],
+ [[MPPConnection alloc] initWithStart:391 end:393],
+ [[MPPConnection alloc] initWithStart:393 end:269],
+ [[MPPConnection alloc] initWithStart:200 end:199],
+ [[MPPConnection alloc] initWithStart:199 end:428],
+ [[MPPConnection alloc] initWithStart:428 end:200],
+ [[MPPConnection alloc] initWithStart:266 end:330],
+ [[MPPConnection alloc] initWithStart:330 end:329],
+ [[MPPConnection alloc] initWithStart:329 end:266],
+ [[MPPConnection alloc] initWithStart:287 end:273],
+ [[MPPConnection alloc] initWithStart:273 end:422],
+ [[MPPConnection alloc] initWithStart:422 end:287],
+ [[MPPConnection alloc] initWithStart:250 end:462],
+ [[MPPConnection alloc] initWithStart:462 end:328],
+ [[MPPConnection alloc] initWithStart:328 end:250],
+ [[MPPConnection alloc] initWithStart:258 end:286],
+ [[MPPConnection alloc] initWithStart:286 end:384],
+ [[MPPConnection alloc] initWithStart:384 end:258],
+ [[MPPConnection alloc] initWithStart:265 end:353],
+ [[MPPConnection alloc] initWithStart:353 end:342],
+ [[MPPConnection alloc] initWithStart:342 end:265],
+ [[MPPConnection alloc] initWithStart:387 end:259],
+ [[MPPConnection alloc] initWithStart:259 end:257],
+ [[MPPConnection alloc] initWithStart:257 end:387],
+ [[MPPConnection alloc] initWithStart:424 end:431],
+ [[MPPConnection alloc] initWithStart:431 end:430],
+ [[MPPConnection alloc] initWithStart:430 end:424],
+ [[MPPConnection alloc] initWithStart:342 end:353],
+ [[MPPConnection alloc] initWithStart:353 end:276],
+ [[MPPConnection alloc] initWithStart:276 end:342],
+ [[MPPConnection alloc] initWithStart:273 end:335],
+ [[MPPConnection alloc] initWithStart:335 end:424],
+ [[MPPConnection alloc] initWithStart:424 end:273],
+ [[MPPConnection alloc] initWithStart:292 end:325],
+ [[MPPConnection alloc] initWithStart:325 end:307],
+ [[MPPConnection alloc] initWithStart:307 end:292],
+ [[MPPConnection alloc] initWithStart:366 end:447],
+ [[MPPConnection alloc] initWithStart:447 end:345],
+ [[MPPConnection alloc] initWithStart:345 end:366],
+ [[MPPConnection alloc] initWithStart:271 end:303],
+ [[MPPConnection alloc] initWithStart:303 end:302],
+ [[MPPConnection alloc] initWithStart:302 end:271],
+ [[MPPConnection alloc] initWithStart:423 end:266],
+ [[MPPConnection alloc] initWithStart:266 end:371],
+ [[MPPConnection alloc] initWithStart:371 end:423],
+ [[MPPConnection alloc] initWithStart:294 end:455],
+ [[MPPConnection alloc] initWithStart:455 end:460],
+ [[MPPConnection alloc] initWithStart:460 end:294],
+ [[MPPConnection alloc] initWithStart:279 end:278],
+ [[MPPConnection alloc] initWithStart:278 end:294],
+ [[MPPConnection alloc] initWithStart:294 end:279],
+ [[MPPConnection alloc] initWithStart:271 end:272],
+ [[MPPConnection alloc] initWithStart:272 end:304],
+ [[MPPConnection alloc] initWithStart:304 end:271],
+ [[MPPConnection alloc] initWithStart:432 end:434],
+ [[MPPConnection alloc] initWithStart:434 end:427],
+ [[MPPConnection alloc] initWithStart:427 end:432],
+ [[MPPConnection alloc] initWithStart:272 end:407],
+ [[MPPConnection alloc] initWithStart:407 end:408],
+ [[MPPConnection alloc] initWithStart:408 end:272],
+ [[MPPConnection alloc] initWithStart:394 end:430],
+ [[MPPConnection alloc] initWithStart:430 end:431],
+ [[MPPConnection alloc] initWithStart:431 end:394],
+ [[MPPConnection alloc] initWithStart:395 end:369],
+ [[MPPConnection alloc] initWithStart:369 end:400],
+ [[MPPConnection alloc] initWithStart:400 end:395],
+ [[MPPConnection alloc] initWithStart:334 end:333],
+ [[MPPConnection alloc] initWithStart:333 end:299],
+ [[MPPConnection alloc] initWithStart:299 end:334],
+ [[MPPConnection alloc] initWithStart:351 end:417],
+ [[MPPConnection alloc] initWithStart:417 end:168],
+ [[MPPConnection alloc] initWithStart:168 end:351],
+ [[MPPConnection alloc] initWithStart:352 end:280],
+ [[MPPConnection alloc] initWithStart:280 end:411],
+ [[MPPConnection alloc] initWithStart:411 end:352],
+ [[MPPConnection alloc] initWithStart:325 end:319],
+ [[MPPConnection alloc] initWithStart:319 end:320],
+ [[MPPConnection alloc] initWithStart:320 end:325],
+ [[MPPConnection alloc] initWithStart:295 end:296],
+ [[MPPConnection alloc] initWithStart:296 end:336],
+ [[MPPConnection alloc] initWithStart:336 end:295],
+ [[MPPConnection alloc] initWithStart:319 end:403],
+ [[MPPConnection alloc] initWithStart:403 end:404],
+ [[MPPConnection alloc] initWithStart:404 end:319],
+ [[MPPConnection alloc] initWithStart:330 end:348],
+ [[MPPConnection alloc] initWithStart:348 end:349],
+ [[MPPConnection alloc] initWithStart:349 end:330],
+ [[MPPConnection alloc] initWithStart:293 end:298],
+ [[MPPConnection alloc] initWithStart:298 end:333],
+ [[MPPConnection alloc] initWithStart:333 end:293],
+ [[MPPConnection alloc] initWithStart:323 end:454],
+ [[MPPConnection alloc] initWithStart:454 end:447],
+ [[MPPConnection alloc] initWithStart:447 end:323],
+ [[MPPConnection alloc] initWithStart:15 end:16],
+ [[MPPConnection alloc] initWithStart:16 end:315],
+ [[MPPConnection alloc] initWithStart:315 end:15],
+ [[MPPConnection alloc] initWithStart:358 end:429],
+ [[MPPConnection alloc] initWithStart:429 end:279],
+ [[MPPConnection alloc] initWithStart:279 end:358],
+ [[MPPConnection alloc] initWithStart:14 end:15],
+ [[MPPConnection alloc] initWithStart:15 end:316],
+ [[MPPConnection alloc] initWithStart:316 end:14],
+ [[MPPConnection alloc] initWithStart:285 end:336],
+ [[MPPConnection alloc] initWithStart:336 end:9],
+ [[MPPConnection alloc] initWithStart:9 end:285],
+ [[MPPConnection alloc] initWithStart:329 end:349],
+ [[MPPConnection alloc] initWithStart:349 end:350],
+ [[MPPConnection alloc] initWithStart:350 end:329],
+ [[MPPConnection alloc] initWithStart:374 end:380],
+ [[MPPConnection alloc] initWithStart:380 end:252],
+ [[MPPConnection alloc] initWithStart:252 end:374],
+ [[MPPConnection alloc] initWithStart:318 end:402],
+ [[MPPConnection alloc] initWithStart:402 end:403],
+ [[MPPConnection alloc] initWithStart:403 end:318],
+ [[MPPConnection alloc] initWithStart:6 end:197],
+ [[MPPConnection alloc] initWithStart:197 end:419],
+ [[MPPConnection alloc] initWithStart:419 end:6],
+ [[MPPConnection alloc] initWithStart:318 end:319],
+ [[MPPConnection alloc] initWithStart:319 end:325],
+ [[MPPConnection alloc] initWithStart:325 end:318],
+ [[MPPConnection alloc] initWithStart:367 end:364],
+ [[MPPConnection alloc] initWithStart:364 end:365],
+ [[MPPConnection alloc] initWithStart:365 end:367],
+ [[MPPConnection alloc] initWithStart:435 end:367],
+ [[MPPConnection alloc] initWithStart:367 end:397],
+ [[MPPConnection alloc] initWithStart:397 end:435],
+ [[MPPConnection alloc] initWithStart:344 end:438],
+ [[MPPConnection alloc] initWithStart:438 end:439],
+ [[MPPConnection alloc] initWithStart:439 end:344],
+ [[MPPConnection alloc] initWithStart:272 end:271],
+ [[MPPConnection alloc] initWithStart:271 end:311],
+ [[MPPConnection alloc] initWithStart:311 end:272],
+ [[MPPConnection alloc] initWithStart:195 end:5],
+ [[MPPConnection alloc] initWithStart:5 end:281],
+ [[MPPConnection alloc] initWithStart:281 end:195],
+ [[MPPConnection alloc] initWithStart:273 end:287],
+ [[MPPConnection alloc] initWithStart:287 end:291],
+ [[MPPConnection alloc] initWithStart:291 end:273],
+ [[MPPConnection alloc] initWithStart:396 end:428],
+ [[MPPConnection alloc] initWithStart:428 end:199],
+ [[MPPConnection alloc] initWithStart:199 end:396],
+ [[MPPConnection alloc] initWithStart:311 end:271],
+ [[MPPConnection alloc] initWithStart:271 end:268],
+ [[MPPConnection alloc] initWithStart:268 end:311],
+ [[MPPConnection alloc] initWithStart:283 end:444],
+ [[MPPConnection alloc] initWithStart:444 end:445],
+ [[MPPConnection alloc] initWithStart:445 end:283],
+ [[MPPConnection alloc] initWithStart:373 end:254],
+ [[MPPConnection alloc] initWithStart:254 end:339],
+ [[MPPConnection alloc] initWithStart:339 end:373],
+ [[MPPConnection alloc] initWithStart:282 end:334],
+ [[MPPConnection alloc] initWithStart:334 end:296],
+ [[MPPConnection alloc] initWithStart:296 end:282],
+ [[MPPConnection alloc] initWithStart:449 end:347],
+ [[MPPConnection alloc] initWithStart:347 end:346],
+ [[MPPConnection alloc] initWithStart:346 end:449],
+ [[MPPConnection alloc] initWithStart:264 end:447],
+ [[MPPConnection alloc] initWithStart:447 end:454],
+ [[MPPConnection alloc] initWithStart:454 end:264],
+ [[MPPConnection alloc] initWithStart:336 end:296],
+ [[MPPConnection alloc] initWithStart:296 end:299],
+ [[MPPConnection alloc] initWithStart:299 end:336],
+ [[MPPConnection alloc] initWithStart:338 end:10],
+ [[MPPConnection alloc] initWithStart:10 end:151],
+ [[MPPConnection alloc] initWithStart:151 end:338],
+ [[MPPConnection alloc] initWithStart:278 end:439],
+ [[MPPConnection alloc] initWithStart:439 end:455],
+ [[MPPConnection alloc] initWithStart:455 end:278],
+ [[MPPConnection alloc] initWithStart:292 end:407],
+ [[MPPConnection alloc] initWithStart:407 end:415],
+ [[MPPConnection alloc] initWithStart:415 end:292],
+ [[MPPConnection alloc] initWithStart:358 end:371],
+ [[MPPConnection alloc] initWithStart:371 end:355],
+ [[MPPConnection alloc] initWithStart:355 end:358],
+ [[MPPConnection alloc] initWithStart:340 end:345],
+ [[MPPConnection alloc] initWithStart:345 end:372],
+ [[MPPConnection alloc] initWithStart:372 end:340],
+ [[MPPConnection alloc] initWithStart:346 end:347],
+ [[MPPConnection alloc] initWithStart:347 end:280],
+ [[MPPConnection alloc] initWithStart:280 end:346],
+ [[MPPConnection alloc] initWithStart:442 end:443],
+ [[MPPConnection alloc] initWithStart:443 end:282],
+ [[MPPConnection alloc] initWithStart:282 end:442],
+ [[MPPConnection alloc] initWithStart:19 end:94],
+ [[MPPConnection alloc] initWithStart:94 end:370],
+ [[MPPConnection alloc] initWithStart:370 end:19],
+ [[MPPConnection alloc] initWithStart:441 end:442],
+ [[MPPConnection alloc] initWithStart:442 end:295],
+ [[MPPConnection alloc] initWithStart:295 end:441],
+ [[MPPConnection alloc] initWithStart:248 end:419],
+ [[MPPConnection alloc] initWithStart:419 end:197],
+ [[MPPConnection alloc] initWithStart:197 end:248],
+ [[MPPConnection alloc] initWithStart:263 end:255],
+ [[MPPConnection alloc] initWithStart:255 end:359],
+ [[MPPConnection alloc] initWithStart:359 end:263],
+ [[MPPConnection alloc] initWithStart:440 end:275],
+ [[MPPConnection alloc] initWithStart:275 end:274],
+ [[MPPConnection alloc] initWithStart:274 end:440],
+ [[MPPConnection alloc] initWithStart:300 end:383],
+ [[MPPConnection alloc] initWithStart:383 end:368],
+ [[MPPConnection alloc] initWithStart:368 end:300],
+ [[MPPConnection alloc] initWithStart:351 end:412],
+ [[MPPConnection alloc] initWithStart:412 end:465],
+ [[MPPConnection alloc] initWithStart:465 end:351],
+ [[MPPConnection alloc] initWithStart:263 end:467],
+ [[MPPConnection alloc] initWithStart:467 end:466],
+ [[MPPConnection alloc] initWithStart:466 end:263],
+ [[MPPConnection alloc] initWithStart:301 end:368],
+ [[MPPConnection alloc] initWithStart:368 end:389],
+ [[MPPConnection alloc] initWithStart:389 end:301],
+ [[MPPConnection alloc] initWithStart:395 end:378],
+ [[MPPConnection alloc] initWithStart:378 end:379],
+ [[MPPConnection alloc] initWithStart:379 end:395],
+ [[MPPConnection alloc] initWithStart:412 end:351],
+ [[MPPConnection alloc] initWithStart:351 end:419],
+ [[MPPConnection alloc] initWithStart:419 end:412],
+ [[MPPConnection alloc] initWithStart:436 end:426],
+ [[MPPConnection alloc] initWithStart:426 end:322],
+ [[MPPConnection alloc] initWithStart:322 end:436],
+ [[MPPConnection alloc] initWithStart:2 end:164],
+ [[MPPConnection alloc] initWithStart:164 end:393],
+ [[MPPConnection alloc] initWithStart:393 end:2],
+ [[MPPConnection alloc] initWithStart:370 end:462],
+ [[MPPConnection alloc] initWithStart:462 end:461],
+ [[MPPConnection alloc] initWithStart:461 end:370],
+ [[MPPConnection alloc] initWithStart:164 end:0],
+ [[MPPConnection alloc] initWithStart:0 end:267],
+ [[MPPConnection alloc] initWithStart:267 end:164],
+ [[MPPConnection alloc] initWithStart:302 end:11],
+ [[MPPConnection alloc] initWithStart:11 end:12],
+ [[MPPConnection alloc] initWithStart:12 end:302],
+ [[MPPConnection alloc] initWithStart:268 end:12],
+ [[MPPConnection alloc] initWithStart:12 end:13],
+ [[MPPConnection alloc] initWithStart:13 end:268],
+ [[MPPConnection alloc] initWithStart:293 end:300],
+ [[MPPConnection alloc] initWithStart:300 end:301],
+ [[MPPConnection alloc] initWithStart:301 end:293],
+ [[MPPConnection alloc] initWithStart:446 end:261],
+ [[MPPConnection alloc] initWithStart:261 end:340],
+ [[MPPConnection alloc] initWithStart:340 end:446],
+ [[MPPConnection alloc] initWithStart:330 end:266],
+ [[MPPConnection alloc] initWithStart:266 end:425],
+ [[MPPConnection alloc] initWithStart:425 end:330],
+ [[MPPConnection alloc] initWithStart:426 end:423],
+ [[MPPConnection alloc] initWithStart:423 end:391],
+ [[MPPConnection alloc] initWithStart:391 end:426],
+ [[MPPConnection alloc] initWithStart:429 end:355],
+ [[MPPConnection alloc] initWithStart:355 end:437],
+ [[MPPConnection alloc] initWithStart:437 end:429],
+ [[MPPConnection alloc] initWithStart:391 end:327],
+ [[MPPConnection alloc] initWithStart:327 end:326],
+ [[MPPConnection alloc] initWithStart:326 end:391],
+ [[MPPConnection alloc] initWithStart:440 end:457],
+ [[MPPConnection alloc] initWithStart:457 end:438],
+ [[MPPConnection alloc] initWithStart:438 end:440],
+ [[MPPConnection alloc] initWithStart:341 end:382],
+ [[MPPConnection alloc] initWithStart:382 end:362],
+ [[MPPConnection alloc] initWithStart:362 end:341],
+ [[MPPConnection alloc] initWithStart:459 end:457],
+ [[MPPConnection alloc] initWithStart:457 end:461],
+ [[MPPConnection alloc] initWithStart:461 end:459],
+ [[MPPConnection alloc] initWithStart:434 end:430],
+ [[MPPConnection alloc] initWithStart:430 end:394],
+ [[MPPConnection alloc] initWithStart:394 end:434],
+ [[MPPConnection alloc] initWithStart:414 end:463],
+ [[MPPConnection alloc] initWithStart:463 end:362],
+ [[MPPConnection alloc] initWithStart:362 end:414],
+ [[MPPConnection alloc] initWithStart:396 end:369],
+ [[MPPConnection alloc] initWithStart:369 end:262],
+ [[MPPConnection alloc] initWithStart:262 end:396],
+ [[MPPConnection alloc] initWithStart:354 end:461],
+ [[MPPConnection alloc] initWithStart:461 end:457],
+ [[MPPConnection alloc] initWithStart:457 end:354],
+ [[MPPConnection alloc] initWithStart:316 end:403],
+ [[MPPConnection alloc] initWithStart:403 end:402],
+ [[MPPConnection alloc] initWithStart:402 end:316],
+ [[MPPConnection alloc] initWithStart:315 end:404],
+ [[MPPConnection alloc] initWithStart:404 end:403],
+ [[MPPConnection alloc] initWithStart:403 end:315],
+ [[MPPConnection alloc] initWithStart:314 end:405],
+ [[MPPConnection alloc] initWithStart:405 end:404],
+ [[MPPConnection alloc] initWithStart:404 end:314],
+ [[MPPConnection alloc] initWithStart:313 end:406],
+ [[MPPConnection alloc] initWithStart:406 end:405],
+ [[MPPConnection alloc] initWithStart:405 end:313],
+ [[MPPConnection alloc] initWithStart:421 end:418],
+ [[MPPConnection alloc] initWithStart:418 end:406],
+ [[MPPConnection alloc] initWithStart:406 end:421],
+ [[MPPConnection alloc] initWithStart:366 end:401],
+ [[MPPConnection alloc] initWithStart:401 end:361],
+ [[MPPConnection alloc] initWithStart:361 end:366],
+ [[MPPConnection alloc] initWithStart:306 end:408],
+ [[MPPConnection alloc] initWithStart:408 end:407],
+ [[MPPConnection alloc] initWithStart:407 end:306],
+ [[MPPConnection alloc] initWithStart:291 end:409],
+ [[MPPConnection alloc] initWithStart:409 end:408],
+ [[MPPConnection alloc] initWithStart:408 end:291],
+ [[MPPConnection alloc] initWithStart:287 end:410],
+ [[MPPConnection alloc] initWithStart:410 end:409],
+ [[MPPConnection alloc] initWithStart:409 end:287],
+ [[MPPConnection alloc] initWithStart:432 end:436],
+ [[MPPConnection alloc] initWithStart:436 end:410],
+ [[MPPConnection alloc] initWithStart:410 end:432],
+ [[MPPConnection alloc] initWithStart:434 end:416],
+ [[MPPConnection alloc] initWithStart:416 end:411],
+ [[MPPConnection alloc] initWithStart:411 end:434],
+ [[MPPConnection alloc] initWithStart:264 end:368],
+ [[MPPConnection alloc] initWithStart:368 end:383],
+ [[MPPConnection alloc] initWithStart:383 end:264],
+ [[MPPConnection alloc] initWithStart:309 end:438],
+ [[MPPConnection alloc] initWithStart:438 end:457],
+ [[MPPConnection alloc] initWithStart:457 end:309],
+ [[MPPConnection alloc] initWithStart:352 end:376],
+ [[MPPConnection alloc] initWithStart:376 end:401],
+ [[MPPConnection alloc] initWithStart:401 end:352],
+ [[MPPConnection alloc] initWithStart:274 end:275],
+ [[MPPConnection alloc] initWithStart:275 end:4],
+ [[MPPConnection alloc] initWithStart:4 end:274],
+ [[MPPConnection alloc] initWithStart:421 end:428],
+ [[MPPConnection alloc] initWithStart:428 end:262],
+ [[MPPConnection alloc] initWithStart:262 end:421],
+ [[MPPConnection alloc] initWithStart:294 end:327],
+ [[MPPConnection alloc] initWithStart:327 end:358],
+ [[MPPConnection alloc] initWithStart:358 end:294],
+ [[MPPConnection alloc] initWithStart:433 end:416],
+ [[MPPConnection alloc] initWithStart:416 end:367],
+ [[MPPConnection alloc] initWithStart:367 end:433],
+ [[MPPConnection alloc] initWithStart:289 end:455],
+ [[MPPConnection alloc] initWithStart:455 end:439],
+ [[MPPConnection alloc] initWithStart:439 end:289],
+ [[MPPConnection alloc] initWithStart:462 end:370],
+ [[MPPConnection alloc] initWithStart:370 end:326],
+ [[MPPConnection alloc] initWithStart:326 end:462],
+ [[MPPConnection alloc] initWithStart:2 end:326],
+ [[MPPConnection alloc] initWithStart:326 end:370],
+ [[MPPConnection alloc] initWithStart:370 end:2],
+ [[MPPConnection alloc] initWithStart:305 end:460],
+ [[MPPConnection alloc] initWithStart:460 end:455],
+ [[MPPConnection alloc] initWithStart:455 end:305],
+ [[MPPConnection alloc] initWithStart:254 end:449],
+ [[MPPConnection alloc] initWithStart:449 end:448],
+ [[MPPConnection alloc] initWithStart:448 end:254],
+ [[MPPConnection alloc] initWithStart:255 end:261],
+ [[MPPConnection alloc] initWithStart:261 end:446],
+ [[MPPConnection alloc] initWithStart:446 end:255],
+ [[MPPConnection alloc] initWithStart:253 end:450],
+ [[MPPConnection alloc] initWithStart:450 end:449],
+ [[MPPConnection alloc] initWithStart:449 end:253],
+ [[MPPConnection alloc] initWithStart:252 end:451],
+ [[MPPConnection alloc] initWithStart:451 end:450],
+ [[MPPConnection alloc] initWithStart:450 end:252],
+ [[MPPConnection alloc] initWithStart:256 end:452],
+ [[MPPConnection alloc] initWithStart:452 end:451],
+ [[MPPConnection alloc] initWithStart:451 end:256],
+ [[MPPConnection alloc] initWithStart:341 end:453],
+ [[MPPConnection alloc] initWithStart:453 end:452],
+ [[MPPConnection alloc] initWithStart:452 end:341],
+ [[MPPConnection alloc] initWithStart:413 end:464],
+ [[MPPConnection alloc] initWithStart:464 end:463],
+ [[MPPConnection alloc] initWithStart:463 end:413],
+ [[MPPConnection alloc] initWithStart:441 end:413],
+ [[MPPConnection alloc] initWithStart:413 end:414],
+ [[MPPConnection alloc] initWithStart:414 end:441],
+ [[MPPConnection alloc] initWithStart:258 end:442],
+ [[MPPConnection alloc] initWithStart:442 end:441],
+ [[MPPConnection alloc] initWithStart:441 end:258],
+ [[MPPConnection alloc] initWithStart:257 end:443],
+ [[MPPConnection alloc] initWithStart:443 end:442],
+ [[MPPConnection alloc] initWithStart:442 end:257],
+ [[MPPConnection alloc] initWithStart:259 end:444],
+ [[MPPConnection alloc] initWithStart:444 end:443],
+ [[MPPConnection alloc] initWithStart:443 end:259],
+ [[MPPConnection alloc] initWithStart:260 end:445],
+ [[MPPConnection alloc] initWithStart:445 end:444],
+ [[MPPConnection alloc] initWithStart:444 end:260],
+ [[MPPConnection alloc] initWithStart:467 end:342],
+ [[MPPConnection alloc] initWithStart:342 end:445],
+ [[MPPConnection alloc] initWithStart:445 end:467],
+ [[MPPConnection alloc] initWithStart:459 end:458],
+ [[MPPConnection alloc] initWithStart:458 end:250],
+ [[MPPConnection alloc] initWithStart:250 end:459],
+ [[MPPConnection alloc] initWithStart:289 end:392],
+ [[MPPConnection alloc] initWithStart:392 end:290],
+ [[MPPConnection alloc] initWithStart:290 end:289],
+ [[MPPConnection alloc] initWithStart:290 end:328],
+ [[MPPConnection alloc] initWithStart:328 end:460],
+ [[MPPConnection alloc] initWithStart:460 end:290],
+ [[MPPConnection alloc] initWithStart:376 end:433],
+ [[MPPConnection alloc] initWithStart:433 end:435],
+ [[MPPConnection alloc] initWithStart:435 end:376],
+ [[MPPConnection alloc] initWithStart:250 end:290],
+ [[MPPConnection alloc] initWithStart:290 end:392],
+ [[MPPConnection alloc] initWithStart:392 end:250],
+ [[MPPConnection alloc] initWithStart:411 end:416],
+ [[MPPConnection alloc] initWithStart:416 end:433],
+ [[MPPConnection alloc] initWithStart:433 end:411],
+ [[MPPConnection alloc] initWithStart:341 end:463],
+ [[MPPConnection alloc] initWithStart:463 end:464],
+ [[MPPConnection alloc] initWithStart:464 end:341],
+ [[MPPConnection alloc] initWithStart:453 end:464],
+ [[MPPConnection alloc] initWithStart:464 end:465],
+ [[MPPConnection alloc] initWithStart:465 end:453],
+ [[MPPConnection alloc] initWithStart:357 end:465],
+ [[MPPConnection alloc] initWithStart:465 end:412],
+ [[MPPConnection alloc] initWithStart:412 end:357],
+ [[MPPConnection alloc] initWithStart:343 end:412],
+ [[MPPConnection alloc] initWithStart:412 end:399],
+ [[MPPConnection alloc] initWithStart:399 end:343],
+ [[MPPConnection alloc] initWithStart:360 end:363],
+ [[MPPConnection alloc] initWithStart:363 end:440],
+ [[MPPConnection alloc] initWithStart:440 end:360],
+ [[MPPConnection alloc] initWithStart:437 end:399],
+ [[MPPConnection alloc] initWithStart:399 end:456],
+ [[MPPConnection alloc] initWithStart:456 end:437],
+ [[MPPConnection alloc] initWithStart:420 end:456],
+ [[MPPConnection alloc] initWithStart:456 end:363],
+ [[MPPConnection alloc] initWithStart:363 end:420],
+ [[MPPConnection alloc] initWithStart:401 end:435],
+ [[MPPConnection alloc] initWithStart:435 end:288],
+ [[MPPConnection alloc] initWithStart:288 end:401],
+ [[MPPConnection alloc] initWithStart:372 end:383],
+ [[MPPConnection alloc] initWithStart:383 end:353],
+ [[MPPConnection alloc] initWithStart:353 end:372],
+ [[MPPConnection alloc] initWithStart:339 end:255],
+ [[MPPConnection alloc] initWithStart:255 end:249],
+ [[MPPConnection alloc] initWithStart:249 end:339],
+ [[MPPConnection alloc] initWithStart:448 end:261],
+ [[MPPConnection alloc] initWithStart:261 end:255],
+ [[MPPConnection alloc] initWithStart:255 end:448],
+ [[MPPConnection alloc] initWithStart:133 end:243],
+ [[MPPConnection alloc] initWithStart:243 end:190],
+ [[MPPConnection alloc] initWithStart:190 end:133],
+ [[MPPConnection alloc] initWithStart:133 end:155],
+ [[MPPConnection alloc] initWithStart:155 end:112],
+ [[MPPConnection alloc] initWithStart:112 end:133],
+ [[MPPConnection alloc] initWithStart:33 end:246],
+ [[MPPConnection alloc] initWithStart:246 end:247],
+ [[MPPConnection alloc] initWithStart:247 end:33],
+ [[MPPConnection alloc] initWithStart:33 end:130],
+ [[MPPConnection alloc] initWithStart:130 end:25],
+ [[MPPConnection alloc] initWithStart:25 end:33],
+ [[MPPConnection alloc] initWithStart:398 end:384],
+ [[MPPConnection alloc] initWithStart:384 end:286],
+ [[MPPConnection alloc] initWithStart:286 end:398],
+ [[MPPConnection alloc] initWithStart:362 end:398],
+ [[MPPConnection alloc] initWithStart:398 end:414],
+ [[MPPConnection alloc] initWithStart:414 end:362],
+ [[MPPConnection alloc] initWithStart:362 end:463],
+ [[MPPConnection alloc] initWithStart:463 end:341],
+ [[MPPConnection alloc] initWithStart:341 end:362],
+ [[MPPConnection alloc] initWithStart:263 end:359],
+ [[MPPConnection alloc] initWithStart:359 end:467],
+ [[MPPConnection alloc] initWithStart:467 end:263],
+ [[MPPConnection alloc] initWithStart:263 end:249],
+ [[MPPConnection alloc] initWithStart:249 end:255],
+ [[MPPConnection alloc] initWithStart:255 end:263],
+ [[MPPConnection alloc] initWithStart:466 end:467],
+ [[MPPConnection alloc] initWithStart:467 end:260],
+ [[MPPConnection alloc] initWithStart:260 end:466],
+ [[MPPConnection alloc] initWithStart:75 end:60],
+ [[MPPConnection alloc] initWithStart:60 end:166],
+ [[MPPConnection alloc] initWithStart:166 end:75],
+ [[MPPConnection alloc] initWithStart:238 end:239],
+ [[MPPConnection alloc] initWithStart:239 end:79],
+ [[MPPConnection alloc] initWithStart:79 end:238],
+ [[MPPConnection alloc] initWithStart:162 end:127],
+ [[MPPConnection alloc] initWithStart:127 end:139],
+ [[MPPConnection alloc] initWithStart:139 end:162],
+ [[MPPConnection alloc] initWithStart:72 end:11],
+ [[MPPConnection alloc] initWithStart:11 end:37],
+ [[MPPConnection alloc] initWithStart:37 end:72],
+ [[MPPConnection alloc] initWithStart:121 end:232],
+ [[MPPConnection alloc] initWithStart:232 end:120],
+ [[MPPConnection alloc] initWithStart:120 end:121],
+ [[MPPConnection alloc] initWithStart:73 end:72],
+ [[MPPConnection alloc] initWithStart:72 end:39],
+ [[MPPConnection alloc] initWithStart:39 end:73],
+ [[MPPConnection alloc] initWithStart:114 end:128],
+ [[MPPConnection alloc] initWithStart:128 end:47],
+ [[MPPConnection alloc] initWithStart:47 end:114],
+ [[MPPConnection alloc] initWithStart:233 end:232],
+ [[MPPConnection alloc] initWithStart:232 end:128],
+ [[MPPConnection alloc] initWithStart:128 end:233],
+ [[MPPConnection alloc] initWithStart:103 end:104],
+ [[MPPConnection alloc] initWithStart:104 end:67],
+ [[MPPConnection alloc] initWithStart:67 end:103],
+ [[MPPConnection alloc] initWithStart:152 end:175],
+ [[MPPConnection alloc] initWithStart:175 end:148],
+ [[MPPConnection alloc] initWithStart:148 end:152],
+ [[MPPConnection alloc] initWithStart:119 end:118],
+ [[MPPConnection alloc] initWithStart:118 end:101],
+ [[MPPConnection alloc] initWithStart:101 end:119],
+ [[MPPConnection alloc] initWithStart:74 end:73],
+ [[MPPConnection alloc] initWithStart:73 end:40],
+ [[MPPConnection alloc] initWithStart:40 end:74],
+ [[MPPConnection alloc] initWithStart:107 end:9],
+ [[MPPConnection alloc] initWithStart:9 end:108],
+ [[MPPConnection alloc] initWithStart:108 end:107],
+ [[MPPConnection alloc] initWithStart:49 end:48],
+ [[MPPConnection alloc] initWithStart:48 end:131],
+ [[MPPConnection alloc] initWithStart:131 end:49],
+ [[MPPConnection alloc] initWithStart:32 end:194],
+ [[MPPConnection alloc] initWithStart:194 end:211],
+ [[MPPConnection alloc] initWithStart:211 end:32],
+ [[MPPConnection alloc] initWithStart:184 end:74],
+ [[MPPConnection alloc] initWithStart:74 end:185],
+ [[MPPConnection alloc] initWithStart:185 end:184],
+ [[MPPConnection alloc] initWithStart:191 end:80],
+ [[MPPConnection alloc] initWithStart:80 end:183],
+ [[MPPConnection alloc] initWithStart:183 end:191],
+ [[MPPConnection alloc] initWithStart:185 end:40],
+ [[MPPConnection alloc] initWithStart:40 end:186],
+ [[MPPConnection alloc] initWithStart:186 end:185],
+ [[MPPConnection alloc] initWithStart:119 end:230],
+ [[MPPConnection alloc] initWithStart:230 end:118],
+ [[MPPConnection alloc] initWithStart:118 end:119],
+ [[MPPConnection alloc] initWithStart:210 end:202],
+ [[MPPConnection alloc] initWithStart:202 end:214],
+ [[MPPConnection alloc] initWithStart:214 end:210],
+ [[MPPConnection alloc] initWithStart:84 end:83],
+ [[MPPConnection alloc] initWithStart:83 end:17],
+ [[MPPConnection alloc] initWithStart:17 end:84],
+ [[MPPConnection alloc] initWithStart:77 end:76],
+ [[MPPConnection alloc] initWithStart:76 end:146],
+ [[MPPConnection alloc] initWithStart:146 end:77],
+ [[MPPConnection alloc] initWithStart:161 end:160],
+ [[MPPConnection alloc] initWithStart:160 end:30],
+ [[MPPConnection alloc] initWithStart:30 end:161],
+ [[MPPConnection alloc] initWithStart:190 end:56],
+ [[MPPConnection alloc] initWithStart:56 end:173],
+ [[MPPConnection alloc] initWithStart:173 end:190],
+ [[MPPConnection alloc] initWithStart:182 end:106],
+ [[MPPConnection alloc] initWithStart:106 end:194],
+ [[MPPConnection alloc] initWithStart:194 end:182],
+ [[MPPConnection alloc] initWithStart:138 end:135],
+ [[MPPConnection alloc] initWithStart:135 end:192],
+ [[MPPConnection alloc] initWithStart:192 end:138],
+ [[MPPConnection alloc] initWithStart:129 end:203],
+ [[MPPConnection alloc] initWithStart:203 end:98],
+ [[MPPConnection alloc] initWithStart:98 end:129],
+ [[MPPConnection alloc] initWithStart:54 end:21],
+ [[MPPConnection alloc] initWithStart:21 end:68],
+ [[MPPConnection alloc] initWithStart:68 end:54],
+ [[MPPConnection alloc] initWithStart:5 end:51],
+ [[MPPConnection alloc] initWithStart:51 end:4],
+ [[MPPConnection alloc] initWithStart:4 end:5],
+ [[MPPConnection alloc] initWithStart:145 end:144],
+ [[MPPConnection alloc] initWithStart:144 end:23],
+ [[MPPConnection alloc] initWithStart:23 end:145],
+ [[MPPConnection alloc] initWithStart:90 end:77],
+ [[MPPConnection alloc] initWithStart:77 end:91],
+ [[MPPConnection alloc] initWithStart:91 end:90],
+ [[MPPConnection alloc] initWithStart:207 end:205],
+ [[MPPConnection alloc] initWithStart:205 end:187],
+ [[MPPConnection alloc] initWithStart:187 end:207],
+ [[MPPConnection alloc] initWithStart:83 end:201],
+ [[MPPConnection alloc] initWithStart:201 end:18],
+ [[MPPConnection alloc] initWithStart:18 end:83],
+ [[MPPConnection alloc] initWithStart:181 end:91],
+ [[MPPConnection alloc] initWithStart:91 end:182],
+ [[MPPConnection alloc] initWithStart:182 end:181],
+ [[MPPConnection alloc] initWithStart:180 end:90],
+ [[MPPConnection alloc] initWithStart:90 end:181],
+ [[MPPConnection alloc] initWithStart:181 end:180],
+ [[MPPConnection alloc] initWithStart:16 end:85],
+ [[MPPConnection alloc] initWithStart:85 end:17],
+ [[MPPConnection alloc] initWithStart:17 end:16],
+ [[MPPConnection alloc] initWithStart:205 end:206],
+ [[MPPConnection alloc] initWithStart:206 end:36],
+ [[MPPConnection alloc] initWithStart:36 end:205],
+ [[MPPConnection alloc] initWithStart:176 end:148],
+ [[MPPConnection alloc] initWithStart:148 end:140],
+ [[MPPConnection alloc] initWithStart:140 end:176],
+ [[MPPConnection alloc] initWithStart:165 end:92],
+ [[MPPConnection alloc] initWithStart:92 end:39],
+ [[MPPConnection alloc] initWithStart:39 end:165],
+ [[MPPConnection alloc] initWithStart:245 end:193],
+ [[MPPConnection alloc] initWithStart:193 end:244],
+ [[MPPConnection alloc] initWithStart:244 end:245],
+ [[MPPConnection alloc] initWithStart:27 end:159],
+ [[MPPConnection alloc] initWithStart:159 end:28],
+ [[MPPConnection alloc] initWithStart:28 end:27],
+ [[MPPConnection alloc] initWithStart:30 end:247],
+ [[MPPConnection alloc] initWithStart:247 end:161],
+ [[MPPConnection alloc] initWithStart:161 end:30],
+ [[MPPConnection alloc] initWithStart:174 end:236],
+ [[MPPConnection alloc] initWithStart:236 end:196],
+ [[MPPConnection alloc] initWithStart:196 end:174],
+ [[MPPConnection alloc] initWithStart:103 end:54],
+ [[MPPConnection alloc] initWithStart:54 end:104],
+ [[MPPConnection alloc] initWithStart:104 end:103],
+ [[MPPConnection alloc] initWithStart:55 end:193],
+ [[MPPConnection alloc] initWithStart:193 end:8],
+ [[MPPConnection alloc] initWithStart:8 end:55],
+ [[MPPConnection alloc] initWithStart:111 end:117],
+ [[MPPConnection alloc] initWithStart:117 end:31],
+ [[MPPConnection alloc] initWithStart:31 end:111],
+ [[MPPConnection alloc] initWithStart:221 end:189],
+ [[MPPConnection alloc] initWithStart:189 end:55],
+ [[MPPConnection alloc] initWithStart:55 end:221],
+ [[MPPConnection alloc] initWithStart:240 end:98],
+ [[MPPConnection alloc] initWithStart:98 end:99],
+ [[MPPConnection alloc] initWithStart:99 end:240],
+ [[MPPConnection alloc] initWithStart:142 end:126],
+ [[MPPConnection alloc] initWithStart:126 end:100],
+ [[MPPConnection alloc] initWithStart:100 end:142],
+ [[MPPConnection alloc] initWithStart:219 end:166],
+ [[MPPConnection alloc] initWithStart:166 end:218],
+ [[MPPConnection alloc] initWithStart:218 end:219],
+ [[MPPConnection alloc] initWithStart:112 end:155],
+ [[MPPConnection alloc] initWithStart:155 end:26],
+ [[MPPConnection alloc] initWithStart:26 end:112],
+ [[MPPConnection alloc] initWithStart:198 end:209],
+ [[MPPConnection alloc] initWithStart:209 end:131],
+ [[MPPConnection alloc] initWithStart:131 end:198],
+ [[MPPConnection alloc] initWithStart:169 end:135],
+ [[MPPConnection alloc] initWithStart:135 end:150],
+ [[MPPConnection alloc] initWithStart:150 end:169],
+ [[MPPConnection alloc] initWithStart:114 end:47],
+ [[MPPConnection alloc] initWithStart:47 end:217],
+ [[MPPConnection alloc] initWithStart:217 end:114],
+ [[MPPConnection alloc] initWithStart:224 end:223],
+ [[MPPConnection alloc] initWithStart:223 end:53],
+ [[MPPConnection alloc] initWithStart:53 end:224],
+ [[MPPConnection alloc] initWithStart:220 end:45],
+ [[MPPConnection alloc] initWithStart:45 end:134],
+ [[MPPConnection alloc] initWithStart:134 end:220],
+ [[MPPConnection alloc] initWithStart:32 end:211],
+ [[MPPConnection alloc] initWithStart:211 end:140],
+ [[MPPConnection alloc] initWithStart:140 end:32],
+ [[MPPConnection alloc] initWithStart:109 end:67],
+ [[MPPConnection alloc] initWithStart:67 end:108],
+ [[MPPConnection alloc] initWithStart:108 end:109],
+ [[MPPConnection alloc] initWithStart:146 end:43],
+ [[MPPConnection alloc] initWithStart:43 end:91],
+ [[MPPConnection alloc] initWithStart:91 end:146],
+ [[MPPConnection alloc] initWithStart:231 end:230],
+ [[MPPConnection alloc] initWithStart:230 end:120],
+ [[MPPConnection alloc] initWithStart:120 end:231],
+ [[MPPConnection alloc] initWithStart:113 end:226],
+ [[MPPConnection alloc] initWithStart:226 end:247],
+ [[MPPConnection alloc] initWithStart:247 end:113],
+ [[MPPConnection alloc] initWithStart:105 end:63],
+ [[MPPConnection alloc] initWithStart:63 end:52],
+ [[MPPConnection alloc] initWithStart:52 end:105],
+ [[MPPConnection alloc] initWithStart:241 end:238],
+ [[MPPConnection alloc] initWithStart:238 end:242],
+ [[MPPConnection alloc] initWithStart:242 end:241],
+ [[MPPConnection alloc] initWithStart:124 end:46],
+ [[MPPConnection alloc] initWithStart:46 end:156],
+ [[MPPConnection alloc] initWithStart:156 end:124],
+ [[MPPConnection alloc] initWithStart:95 end:78],
+ [[MPPConnection alloc] initWithStart:78 end:96],
+ [[MPPConnection alloc] initWithStart:96 end:95],
+ [[MPPConnection alloc] initWithStart:70 end:46],
+ [[MPPConnection alloc] initWithStart:46 end:63],
+ [[MPPConnection alloc] initWithStart:63 end:70],
+ [[MPPConnection alloc] initWithStart:116 end:143],
+ [[MPPConnection alloc] initWithStart:143 end:227],
+ [[MPPConnection alloc] initWithStart:227 end:116],
+ [[MPPConnection alloc] initWithStart:116 end:123],
+ [[MPPConnection alloc] initWithStart:123 end:111],
+ [[MPPConnection alloc] initWithStart:111 end:116],
+ [[MPPConnection alloc] initWithStart:1 end:44],
+ [[MPPConnection alloc] initWithStart:44 end:19],
+ [[MPPConnection alloc] initWithStart:19 end:1],
+ [[MPPConnection alloc] initWithStart:3 end:236],
+ [[MPPConnection alloc] initWithStart:236 end:51],
+ [[MPPConnection alloc] initWithStart:51 end:3],
+ [[MPPConnection alloc] initWithStart:207 end:216],
+ [[MPPConnection alloc] initWithStart:216 end:205],
+ [[MPPConnection alloc] initWithStart:205 end:207],
+ [[MPPConnection alloc] initWithStart:26 end:154],
+ [[MPPConnection alloc] initWithStart:154 end:22],
+ [[MPPConnection alloc] initWithStart:22 end:26],
+ [[MPPConnection alloc] initWithStart:165 end:39],
+ [[MPPConnection alloc] initWithStart:39 end:167],
+ [[MPPConnection alloc] initWithStart:167 end:165],
+ [[MPPConnection alloc] initWithStart:199 end:200],
+ [[MPPConnection alloc] initWithStart:200 end:208],
+ [[MPPConnection alloc] initWithStart:208 end:199],
+ [[MPPConnection alloc] initWithStart:101 end:36],
+ [[MPPConnection alloc] initWithStart:36 end:100],
+ [[MPPConnection alloc] initWithStart:100 end:101],
+ [[MPPConnection alloc] initWithStart:43 end:57],
+ [[MPPConnection alloc] initWithStart:57 end:202],
+ [[MPPConnection alloc] initWithStart:202 end:43],
+ [[MPPConnection alloc] initWithStart:242 end:20],
+ [[MPPConnection alloc] initWithStart:20 end:99],
+ [[MPPConnection alloc] initWithStart:99 end:242],
+ [[MPPConnection alloc] initWithStart:56 end:28],
+ [[MPPConnection alloc] initWithStart:28 end:157],
+ [[MPPConnection alloc] initWithStart:157 end:56],
+ [[MPPConnection alloc] initWithStart:124 end:35],
+ [[MPPConnection alloc] initWithStart:35 end:113],
+ [[MPPConnection alloc] initWithStart:113 end:124],
+ [[MPPConnection alloc] initWithStart:29 end:160],
+ [[MPPConnection alloc] initWithStart:160 end:27],
+ [[MPPConnection alloc] initWithStart:27 end:29],
+ [[MPPConnection alloc] initWithStart:211 end:204],
+ [[MPPConnection alloc] initWithStart:204 end:210],
+ [[MPPConnection alloc] initWithStart:210 end:211],
+ [[MPPConnection alloc] initWithStart:124 end:113],
+ [[MPPConnection alloc] initWithStart:113 end:46],
+ [[MPPConnection alloc] initWithStart:46 end:124],
+ [[MPPConnection alloc] initWithStart:106 end:43],
+ [[MPPConnection alloc] initWithStart:43 end:204],
+ [[MPPConnection alloc] initWithStart:204 end:106],
+ [[MPPConnection alloc] initWithStart:96 end:62],
+ [[MPPConnection alloc] initWithStart:62 end:77],
+ [[MPPConnection alloc] initWithStart:77 end:96],
+ [[MPPConnection alloc] initWithStart:227 end:137],
+ [[MPPConnection alloc] initWithStart:137 end:116],
+ [[MPPConnection alloc] initWithStart:116 end:227],
+ [[MPPConnection alloc] initWithStart:73 end:41],
+ [[MPPConnection alloc] initWithStart:41 end:72],
+ [[MPPConnection alloc] initWithStart:72 end:73],
+ [[MPPConnection alloc] initWithStart:36 end:203],
+ [[MPPConnection alloc] initWithStart:203 end:142],
+ [[MPPConnection alloc] initWithStart:142 end:36],
+ [[MPPConnection alloc] initWithStart:235 end:64],
+ [[MPPConnection alloc] initWithStart:64 end:240],
+ [[MPPConnection alloc] initWithStart:240 end:235],
+ [[MPPConnection alloc] initWithStart:48 end:49],
+ [[MPPConnection alloc] initWithStart:49 end:64],
+ [[MPPConnection alloc] initWithStart:64 end:48],
+ [[MPPConnection alloc] initWithStart:42 end:41],
+ [[MPPConnection alloc] initWithStart:41 end:74],
+ [[MPPConnection alloc] initWithStart:74 end:42],
+ [[MPPConnection alloc] initWithStart:214 end:212],
+ [[MPPConnection alloc] initWithStart:212 end:207],
+ [[MPPConnection alloc] initWithStart:207 end:214],
+ [[MPPConnection alloc] initWithStart:183 end:42],
+ [[MPPConnection alloc] initWithStart:42 end:184],
+ [[MPPConnection alloc] initWithStart:184 end:183],
+ [[MPPConnection alloc] initWithStart:210 end:169],
+ [[MPPConnection alloc] initWithStart:169 end:211],
+ [[MPPConnection alloc] initWithStart:211 end:210],
+ [[MPPConnection alloc] initWithStart:140 end:170],
+ [[MPPConnection alloc] initWithStart:170 end:176],
+ [[MPPConnection alloc] initWithStart:176 end:140],
+ [[MPPConnection alloc] initWithStart:104 end:105],
+ [[MPPConnection alloc] initWithStart:105 end:69],
+ [[MPPConnection alloc] initWithStart:69 end:104],
+ [[MPPConnection alloc] initWithStart:193 end:122],
+ [[MPPConnection alloc] initWithStart:122 end:168],
+ [[MPPConnection alloc] initWithStart:168 end:193],
+ [[MPPConnection alloc] initWithStart:50 end:123],
+ [[MPPConnection alloc] initWithStart:123 end:187],
+ [[MPPConnection alloc] initWithStart:187 end:50],
+ [[MPPConnection alloc] initWithStart:89 end:96],
+ [[MPPConnection alloc] initWithStart:96 end:90],
+ [[MPPConnection alloc] initWithStart:90 end:89],
+ [[MPPConnection alloc] initWithStart:66 end:65],
+ [[MPPConnection alloc] initWithStart:65 end:107],
+ [[MPPConnection alloc] initWithStart:107 end:66],
+ [[MPPConnection alloc] initWithStart:179 end:89],
+ [[MPPConnection alloc] initWithStart:89 end:180],
+ [[MPPConnection alloc] initWithStart:180 end:179],
+ [[MPPConnection alloc] initWithStart:119 end:101],
+ [[MPPConnection alloc] initWithStart:101 end:120],
+ [[MPPConnection alloc] initWithStart:120 end:119],
+ [[MPPConnection alloc] initWithStart:68 end:63],
+ [[MPPConnection alloc] initWithStart:63 end:104],
+ [[MPPConnection alloc] initWithStart:104 end:68],
+ [[MPPConnection alloc] initWithStart:234 end:93],
+ [[MPPConnection alloc] initWithStart:93 end:227],
+ [[MPPConnection alloc] initWithStart:227 end:234],
+ [[MPPConnection alloc] initWithStart:16 end:15],
+ [[MPPConnection alloc] initWithStart:15 end:85],
+ [[MPPConnection alloc] initWithStart:85 end:16],
+ [[MPPConnection alloc] initWithStart:209 end:129],
+ [[MPPConnection alloc] initWithStart:129 end:49],
+ [[MPPConnection alloc] initWithStart:49 end:209],
+ [[MPPConnection alloc] initWithStart:15 end:14],
+ [[MPPConnection alloc] initWithStart:14 end:86],
+ [[MPPConnection alloc] initWithStart:86 end:15],
+ [[MPPConnection alloc] initWithStart:107 end:55],
+ [[MPPConnection alloc] initWithStart:55 end:9],
+ [[MPPConnection alloc] initWithStart:9 end:107],
+ [[MPPConnection alloc] initWithStart:120 end:100],
+ [[MPPConnection alloc] initWithStart:100 end:121],
+ [[MPPConnection alloc] initWithStart:121 end:120],
+ [[MPPConnection alloc] initWithStart:153 end:145],
+ [[MPPConnection alloc] initWithStart:145 end:22],
+ [[MPPConnection alloc] initWithStart:22 end:153],
+ [[MPPConnection alloc] initWithStart:178 end:88],
+ [[MPPConnection alloc] initWithStart:88 end:179],
+ [[MPPConnection alloc] initWithStart:179 end:178],
+ [[MPPConnection alloc] initWithStart:197 end:6],
+ [[MPPConnection alloc] initWithStart:6 end:196],
+ [[MPPConnection alloc] initWithStart:196 end:197],
+ [[MPPConnection alloc] initWithStart:89 end:88],
+ [[MPPConnection alloc] initWithStart:88 end:96],
+ [[MPPConnection alloc] initWithStart:96 end:89],
+ [[MPPConnection alloc] initWithStart:135 end:138],
+ [[MPPConnection alloc] initWithStart:138 end:136],
+ [[MPPConnection alloc] initWithStart:136 end:135],
+ [[MPPConnection alloc] initWithStart:138 end:215],
+ [[MPPConnection alloc] initWithStart:215 end:172],
+ [[MPPConnection alloc] initWithStart:172 end:138],
+ [[MPPConnection alloc] initWithStart:218 end:115],
+ [[MPPConnection alloc] initWithStart:115 end:219],
+ [[MPPConnection alloc] initWithStart:219 end:218],
+ [[MPPConnection alloc] initWithStart:41 end:42],
+ [[MPPConnection alloc] initWithStart:42 end:81],
+ [[MPPConnection alloc] initWithStart:81 end:41],
+ [[MPPConnection alloc] initWithStart:5 end:195],
+ [[MPPConnection alloc] initWithStart:195 end:51],
+ [[MPPConnection alloc] initWithStart:51 end:5],
+ [[MPPConnection alloc] initWithStart:57 end:43],
+ [[MPPConnection alloc] initWithStart:43 end:61],
+ [[MPPConnection alloc] initWithStart:61 end:57],
+ [[MPPConnection alloc] initWithStart:208 end:171],
+ [[MPPConnection alloc] initWithStart:171 end:199],
+ [[MPPConnection alloc] initWithStart:199 end:208],
+ [[MPPConnection alloc] initWithStart:41 end:81],
+ [[MPPConnection alloc] initWithStart:81 end:38],
+ [[MPPConnection alloc] initWithStart:38 end:41],
+ [[MPPConnection alloc] initWithStart:224 end:53],
+ [[MPPConnection alloc] initWithStart:53 end:225],
+ [[MPPConnection alloc] initWithStart:225 end:224],
+ [[MPPConnection alloc] initWithStart:24 end:144],
+ [[MPPConnection alloc] initWithStart:144 end:110],
+ [[MPPConnection alloc] initWithStart:110 end:24],
+ [[MPPConnection alloc] initWithStart:105 end:52],
+ [[MPPConnection alloc] initWithStart:52 end:66],
+ [[MPPConnection alloc] initWithStart:66 end:105],
+ [[MPPConnection alloc] initWithStart:118 end:229],
+ [[MPPConnection alloc] initWithStart:229 end:117],
+ [[MPPConnection alloc] initWithStart:117 end:118],
+ [[MPPConnection alloc] initWithStart:227 end:34],
+ [[MPPConnection alloc] initWithStart:34 end:234],
+ [[MPPConnection alloc] initWithStart:234 end:227],
+ [[MPPConnection alloc] initWithStart:66 end:107],
+ [[MPPConnection alloc] initWithStart:107 end:69],
+ [[MPPConnection alloc] initWithStart:69 end:66],
+ [[MPPConnection alloc] initWithStart:10 end:109],
+ [[MPPConnection alloc] initWithStart:109 end:151],
+ [[MPPConnection alloc] initWithStart:151 end:10],
+ [[MPPConnection alloc] initWithStart:219 end:48],
+ [[MPPConnection alloc] initWithStart:48 end:235],
+ [[MPPConnection alloc] initWithStart:235 end:219],
+ [[MPPConnection alloc] initWithStart:183 end:62],
+ [[MPPConnection alloc] initWithStart:62 end:191],
+ [[MPPConnection alloc] initWithStart:191 end:183],
+ [[MPPConnection alloc] initWithStart:142 end:129],
+ [[MPPConnection alloc] initWithStart:129 end:126],
+ [[MPPConnection alloc] initWithStart:126 end:142],
+ [[MPPConnection alloc] initWithStart:116 end:111],
+ [[MPPConnection alloc] initWithStart:111 end:143],
+ [[MPPConnection alloc] initWithStart:143 end:116],
+ [[MPPConnection alloc] initWithStart:118 end:117],
+ [[MPPConnection alloc] initWithStart:117 end:50],
+ [[MPPConnection alloc] initWithStart:50 end:118],
+ [[MPPConnection alloc] initWithStart:223 end:222],
+ [[MPPConnection alloc] initWithStart:222 end:52],
+ [[MPPConnection alloc] initWithStart:52 end:223],
+ [[MPPConnection alloc] initWithStart:94 end:19],
+ [[MPPConnection alloc] initWithStart:19 end:141],
+ [[MPPConnection alloc] initWithStart:141 end:94],
+ [[MPPConnection alloc] initWithStart:222 end:221],
+ [[MPPConnection alloc] initWithStart:221 end:65],
+ [[MPPConnection alloc] initWithStart:65 end:222],
+ [[MPPConnection alloc] initWithStart:196 end:3],
+ [[MPPConnection alloc] initWithStart:3 end:197],
+ [[MPPConnection alloc] initWithStart:197 end:196],
+ [[MPPConnection alloc] initWithStart:45 end:220],
+ [[MPPConnection alloc] initWithStart:220 end:44],
+ [[MPPConnection alloc] initWithStart:44 end:45],
+ [[MPPConnection alloc] initWithStart:156 end:70],
+ [[MPPConnection alloc] initWithStart:70 end:139],
+ [[MPPConnection alloc] initWithStart:139 end:156],
+ [[MPPConnection alloc] initWithStart:188 end:122],
+ [[MPPConnection alloc] initWithStart:122 end:245],
+ [[MPPConnection alloc] initWithStart:245 end:188],
+ [[MPPConnection alloc] initWithStart:139 end:71],
+ [[MPPConnection alloc] initWithStart:71 end:162],
+ [[MPPConnection alloc] initWithStart:162 end:139],
+ [[MPPConnection alloc] initWithStart:149 end:170],
+ [[MPPConnection alloc] initWithStart:170 end:150],
+ [[MPPConnection alloc] initWithStart:150 end:149],
+ [[MPPConnection alloc] initWithStart:122 end:188],
+ [[MPPConnection alloc] initWithStart:188 end:196],
+ [[MPPConnection alloc] initWithStart:196 end:122],
+ [[MPPConnection alloc] initWithStart:206 end:216],
+ [[MPPConnection alloc] initWithStart:216 end:92],
+ [[MPPConnection alloc] initWithStart:92 end:206],
+ [[MPPConnection alloc] initWithStart:164 end:2],
+ [[MPPConnection alloc] initWithStart:2 end:167],
+ [[MPPConnection alloc] initWithStart:167 end:164],
+ [[MPPConnection alloc] initWithStart:242 end:141],
+ [[MPPConnection alloc] initWithStart:141 end:241],
+ [[MPPConnection alloc] initWithStart:241 end:242],
+ [[MPPConnection alloc] initWithStart:0 end:164],
+ [[MPPConnection alloc] initWithStart:164 end:37],
+ [[MPPConnection alloc] initWithStart:37 end:0],
+ [[MPPConnection alloc] initWithStart:11 end:72],
+ [[MPPConnection alloc] initWithStart:72 end:12],
+ [[MPPConnection alloc] initWithStart:12 end:11],
+ [[MPPConnection alloc] initWithStart:12 end:38],
+ [[MPPConnection alloc] initWithStart:38 end:13],
+ [[MPPConnection alloc] initWithStart:13 end:12],
+ [[MPPConnection alloc] initWithStart:70 end:63],
+ [[MPPConnection alloc] initWithStart:63 end:71],
+ [[MPPConnection alloc] initWithStart:71 end:70],
+ [[MPPConnection alloc] initWithStart:31 end:226],
+ [[MPPConnection alloc] initWithStart:226 end:111],
+ [[MPPConnection alloc] initWithStart:111 end:31],
+ [[MPPConnection alloc] initWithStart:36 end:101],
+ [[MPPConnection alloc] initWithStart:101 end:205],
+ [[MPPConnection alloc] initWithStart:205 end:36],
+ [[MPPConnection alloc] initWithStart:203 end:206],
+ [[MPPConnection alloc] initWithStart:206 end:165],
+ [[MPPConnection alloc] initWithStart:165 end:203],
+ [[MPPConnection alloc] initWithStart:126 end:209],
+ [[MPPConnection alloc] initWithStart:209 end:217],
+ [[MPPConnection alloc] initWithStart:217 end:126],
+ [[MPPConnection alloc] initWithStart:98 end:165],
+ [[MPPConnection alloc] initWithStart:165 end:97],
+ [[MPPConnection alloc] initWithStart:97 end:98],
+ [[MPPConnection alloc] initWithStart:237 end:220],
+ [[MPPConnection alloc] initWithStart:220 end:218],
+ [[MPPConnection alloc] initWithStart:218 end:237],
+ [[MPPConnection alloc] initWithStart:237 end:239],
+ [[MPPConnection alloc] initWithStart:239 end:241],
+ [[MPPConnection alloc] initWithStart:241 end:237],
+ [[MPPConnection alloc] initWithStart:210 end:214],
+ [[MPPConnection alloc] initWithStart:214 end:169],
+ [[MPPConnection alloc] initWithStart:169 end:210],
+ [[MPPConnection alloc] initWithStart:140 end:171],
+ [[MPPConnection alloc] initWithStart:171 end:32],
+ [[MPPConnection alloc] initWithStart:32 end:140],
+ [[MPPConnection alloc] initWithStart:241 end:125],
+ [[MPPConnection alloc] initWithStart:125 end:237],
+ [[MPPConnection alloc] initWithStart:237 end:241],
+ [[MPPConnection alloc] initWithStart:179 end:86],
+ [[MPPConnection alloc] initWithStart:86 end:178],
+ [[MPPConnection alloc] initWithStart:178 end:179],
+ [[MPPConnection alloc] initWithStart:180 end:85],
+ [[MPPConnection alloc] initWithStart:85 end:179],
+ [[MPPConnection alloc] initWithStart:179 end:180],
+ [[MPPConnection alloc] initWithStart:181 end:84],
+ [[MPPConnection alloc] initWithStart:84 end:180],
+ [[MPPConnection alloc] initWithStart:180 end:181],
+ [[MPPConnection alloc] initWithStart:182 end:83],
+ [[MPPConnection alloc] initWithStart:83 end:181],
+ [[MPPConnection alloc] initWithStart:181 end:182],
+ [[MPPConnection alloc] initWithStart:194 end:201],
+ [[MPPConnection alloc] initWithStart:201 end:182],
+ [[MPPConnection alloc] initWithStart:182 end:194],
+ [[MPPConnection alloc] initWithStart:177 end:137],
+ [[MPPConnection alloc] initWithStart:137 end:132],
+ [[MPPConnection alloc] initWithStart:132 end:177],
+ [[MPPConnection alloc] initWithStart:184 end:76],
+ [[MPPConnection alloc] initWithStart:76 end:183],
+ [[MPPConnection alloc] initWithStart:183 end:184],
+ [[MPPConnection alloc] initWithStart:185 end:61],
+ [[MPPConnection alloc] initWithStart:61 end:184],
+ [[MPPConnection alloc] initWithStart:184 end:185],
+ [[MPPConnection alloc] initWithStart:186 end:57],
+ [[MPPConnection alloc] initWithStart:57 end:185],
+ [[MPPConnection alloc] initWithStart:185 end:186],
+ [[MPPConnection alloc] initWithStart:216 end:212],
+ [[MPPConnection alloc] initWithStart:212 end:186],
+ [[MPPConnection alloc] initWithStart:186 end:216],
+ [[MPPConnection alloc] initWithStart:192 end:214],
+ [[MPPConnection alloc] initWithStart:214 end:187],
+ [[MPPConnection alloc] initWithStart:187 end:192],
+ [[MPPConnection alloc] initWithStart:139 end:34],
+ [[MPPConnection alloc] initWithStart:34 end:156],
+ [[MPPConnection alloc] initWithStart:156 end:139],
+ [[MPPConnection alloc] initWithStart:218 end:79],
+ [[MPPConnection alloc] initWithStart:79 end:237],
+ [[MPPConnection alloc] initWithStart:237 end:218],
+ [[MPPConnection alloc] initWithStart:147 end:123],
+ [[MPPConnection alloc] initWithStart:123 end:177],
+ [[MPPConnection alloc] initWithStart:177 end:147],
+ [[MPPConnection alloc] initWithStart:45 end:44],
+ [[MPPConnection alloc] initWithStart:44 end:4],
+ [[MPPConnection alloc] initWithStart:4 end:45],
+ [[MPPConnection alloc] initWithStart:208 end:201],
+ [[MPPConnection alloc] initWithStart:201 end:32],
+ [[MPPConnection alloc] initWithStart:32 end:208],
+ [[MPPConnection alloc] initWithStart:98 end:64],
+ [[MPPConnection alloc] initWithStart:64 end:129],
+ [[MPPConnection alloc] initWithStart:129 end:98],
+ [[MPPConnection alloc] initWithStart:192 end:213],
+ [[MPPConnection alloc] initWithStart:213 end:138],
+ [[MPPConnection alloc] initWithStart:138 end:192],
+ [[MPPConnection alloc] initWithStart:235 end:59],
+ [[MPPConnection alloc] initWithStart:59 end:219],
+ [[MPPConnection alloc] initWithStart:219 end:235],
+ [[MPPConnection alloc] initWithStart:141 end:242],
+ [[MPPConnection alloc] initWithStart:242 end:97],
+ [[MPPConnection alloc] initWithStart:97 end:141],
+ [[MPPConnection alloc] initWithStart:97 end:2],
+ [[MPPConnection alloc] initWithStart:2 end:141],
+ [[MPPConnection alloc] initWithStart:141 end:97],
+ [[MPPConnection alloc] initWithStart:240 end:75],
+ [[MPPConnection alloc] initWithStart:75 end:235],
+ [[MPPConnection alloc] initWithStart:235 end:240],
+ [[MPPConnection alloc] initWithStart:229 end:24],
+ [[MPPConnection alloc] initWithStart:24 end:228],
+ [[MPPConnection alloc] initWithStart:228 end:229],
+ [[MPPConnection alloc] initWithStart:31 end:25],
+ [[MPPConnection alloc] initWithStart:25 end:226],
+ [[MPPConnection alloc] initWithStart:226 end:31],
+ [[MPPConnection alloc] initWithStart:230 end:23],
+ [[MPPConnection alloc] initWithStart:23 end:229],
+ [[MPPConnection alloc] initWithStart:229 end:230],
+ [[MPPConnection alloc] initWithStart:231 end:22],
+ [[MPPConnection alloc] initWithStart:22 end:230],
+ [[MPPConnection alloc] initWithStart:230 end:231],
+ [[MPPConnection alloc] initWithStart:232 end:26],
+ [[MPPConnection alloc] initWithStart:26 end:231],
+ [[MPPConnection alloc] initWithStart:231 end:232],
+ [[MPPConnection alloc] initWithStart:233 end:112],
+ [[MPPConnection alloc] initWithStart:112 end:232],
+ [[MPPConnection alloc] initWithStart:232 end:233],
+ [[MPPConnection alloc] initWithStart:244 end:189],
+ [[MPPConnection alloc] initWithStart:189 end:243],
+ [[MPPConnection alloc] initWithStart:243 end:244],
+ [[MPPConnection alloc] initWithStart:189 end:221],
+ [[MPPConnection alloc] initWithStart:221 end:190],
+ [[MPPConnection alloc] initWithStart:190 end:189],
+ [[MPPConnection alloc] initWithStart:222 end:28],
+ [[MPPConnection alloc] initWithStart:28 end:221],
+ [[MPPConnection alloc] initWithStart:221 end:222],
+ [[MPPConnection alloc] initWithStart:223 end:27],
+ [[MPPConnection alloc] initWithStart:27 end:222],
+ [[MPPConnection alloc] initWithStart:222 end:223],
+ [[MPPConnection alloc] initWithStart:224 end:29],
+ [[MPPConnection alloc] initWithStart:29 end:223],
+ [[MPPConnection alloc] initWithStart:223 end:224],
+ [[MPPConnection alloc] initWithStart:225 end:30],
+ [[MPPConnection alloc] initWithStart:30 end:224],
+ [[MPPConnection alloc] initWithStart:224 end:225],
+ [[MPPConnection alloc] initWithStart:113 end:247],
+ [[MPPConnection alloc] initWithStart:247 end:225],
+ [[MPPConnection alloc] initWithStart:225 end:113],
+ [[MPPConnection alloc] initWithStart:99 end:60],
+ [[MPPConnection alloc] initWithStart:60 end:240],
+ [[MPPConnection alloc] initWithStart:240 end:99],
+ [[MPPConnection alloc] initWithStart:213 end:147],
+ [[MPPConnection alloc] initWithStart:147 end:215],
+ [[MPPConnection alloc] initWithStart:215 end:213],
+ [[MPPConnection alloc] initWithStart:60 end:20],
+ [[MPPConnection alloc] initWithStart:20 end:166],
+ [[MPPConnection alloc] initWithStart:166 end:60],
+ [[MPPConnection alloc] initWithStart:192 end:187],
+ [[MPPConnection alloc] initWithStart:187 end:213],
+ [[MPPConnection alloc] initWithStart:213 end:192],
+ [[MPPConnection alloc] initWithStart:243 end:112],
+ [[MPPConnection alloc] initWithStart:112 end:244],
+ [[MPPConnection alloc] initWithStart:244 end:243],
+ [[MPPConnection alloc] initWithStart:244 end:233],
+ [[MPPConnection alloc] initWithStart:233 end:245],
+ [[MPPConnection alloc] initWithStart:245 end:244],
+ [[MPPConnection alloc] initWithStart:245 end:128],
+ [[MPPConnection alloc] initWithStart:128 end:188],
+ [[MPPConnection alloc] initWithStart:188 end:245],
+ [[MPPConnection alloc] initWithStart:188 end:114],
+ [[MPPConnection alloc] initWithStart:114 end:174],
+ [[MPPConnection alloc] initWithStart:174 end:188],
+ [[MPPConnection alloc] initWithStart:134 end:131],
+ [[MPPConnection alloc] initWithStart:131 end:220],
+ [[MPPConnection alloc] initWithStart:220 end:134],
+ [[MPPConnection alloc] initWithStart:174 end:217],
+ [[MPPConnection alloc] initWithStart:217 end:236],
+ [[MPPConnection alloc] initWithStart:236 end:174],
+ [[MPPConnection alloc] initWithStart:236 end:198],
+ [[MPPConnection alloc] initWithStart:198 end:134],
+ [[MPPConnection alloc] initWithStart:134 end:236],
+ [[MPPConnection alloc] initWithStart:215 end:177],
+ [[MPPConnection alloc] initWithStart:177 end:58],
+ [[MPPConnection alloc] initWithStart:58 end:215],
+ [[MPPConnection alloc] initWithStart:156 end:143],
+ [[MPPConnection alloc] initWithStart:143 end:124],
+ [[MPPConnection alloc] initWithStart:124 end:156],
+ [[MPPConnection alloc] initWithStart:25 end:110],
+ [[MPPConnection alloc] initWithStart:110 end:7],
+ [[MPPConnection alloc] initWithStart:7 end:25],
+ [[MPPConnection alloc] initWithStart:31 end:228],
+ [[MPPConnection alloc] initWithStart:228 end:25],
+ [[MPPConnection alloc] initWithStart:25 end:31],
+ [[MPPConnection alloc] initWithStart:264 end:356],
+ [[MPPConnection alloc] initWithStart:356 end:368],
+ [[MPPConnection alloc] initWithStart:368 end:264],
+ [[MPPConnection alloc] initWithStart:0 end:11],
+ [[MPPConnection alloc] initWithStart:11 end:267],
+ [[MPPConnection alloc] initWithStart:267 end:0],
+ [[MPPConnection alloc] initWithStart:451 end:452],
+ [[MPPConnection alloc] initWithStart:452 end:349],
+ [[MPPConnection alloc] initWithStart:349 end:451],
+ [[MPPConnection alloc] initWithStart:267 end:302],
+ [[MPPConnection alloc] initWithStart:302 end:269],
+ [[MPPConnection alloc] initWithStart:269 end:267],
+ [[MPPConnection alloc] initWithStart:350 end:357],
+ [[MPPConnection alloc] initWithStart:357 end:277],
+ [[MPPConnection alloc] initWithStart:277 end:350],
+ [[MPPConnection alloc] initWithStart:350 end:452],
+ [[MPPConnection alloc] initWithStart:452 end:357],
+ [[MPPConnection alloc] initWithStart:357 end:350],
+ [[MPPConnection alloc] initWithStart:299 end:333],
+ [[MPPConnection alloc] initWithStart:333 end:297],
+ [[MPPConnection alloc] initWithStart:297 end:299],
+ [[MPPConnection alloc] initWithStart:396 end:175],
+ [[MPPConnection alloc] initWithStart:175 end:377],
+ [[MPPConnection alloc] initWithStart:377 end:396],
+ [[MPPConnection alloc] initWithStart:280 end:347],
+ [[MPPConnection alloc] initWithStart:347 end:330],
+ [[MPPConnection alloc] initWithStart:330 end:280],
+ [[MPPConnection alloc] initWithStart:269 end:303],
+ [[MPPConnection alloc] initWithStart:303 end:270],
+ [[MPPConnection alloc] initWithStart:270 end:269],
+ [[MPPConnection alloc] initWithStart:151 end:9],
+ [[MPPConnection alloc] initWithStart:9 end:337],
+ [[MPPConnection alloc] initWithStart:337 end:151],
+ [[MPPConnection alloc] initWithStart:344 end:278],
+ [[MPPConnection alloc] initWithStart:278 end:360],
+ [[MPPConnection alloc] initWithStart:360 end:344],
+ [[MPPConnection alloc] initWithStart:424 end:418],
+ [[MPPConnection alloc] initWithStart:418 end:431],
+ [[MPPConnection alloc] initWithStart:431 end:424],
+ [[MPPConnection alloc] initWithStart:270 end:304],
+ [[MPPConnection alloc] initWithStart:304 end:409],
+ [[MPPConnection alloc] initWithStart:409 end:270],
+ [[MPPConnection alloc] initWithStart:272 end:310],
+ [[MPPConnection alloc] initWithStart:310 end:407],
+ [[MPPConnection alloc] initWithStart:407 end:272],
+ [[MPPConnection alloc] initWithStart:322 end:270],
+ [[MPPConnection alloc] initWithStart:270 end:410],
+ [[MPPConnection alloc] initWithStart:410 end:322],
+ [[MPPConnection alloc] initWithStart:449 end:450],
+ [[MPPConnection alloc] initWithStart:450 end:347],
+ [[MPPConnection alloc] initWithStart:347 end:449],
+ [[MPPConnection alloc] initWithStart:432 end:422],
+ [[MPPConnection alloc] initWithStart:422 end:434],
+ [[MPPConnection alloc] initWithStart:434 end:432],
+ [[MPPConnection alloc] initWithStart:18 end:313],
+ [[MPPConnection alloc] initWithStart:313 end:17],
+ [[MPPConnection alloc] initWithStart:17 end:18],
+ [[MPPConnection alloc] initWithStart:291 end:306],
+ [[MPPConnection alloc] initWithStart:306 end:375],
+ [[MPPConnection alloc] initWithStart:375 end:291],
+ [[MPPConnection alloc] initWithStart:259 end:387],
+ [[MPPConnection alloc] initWithStart:387 end:260],
+ [[MPPConnection alloc] initWithStart:260 end:259],
+ [[MPPConnection alloc] initWithStart:424 end:335],
+ [[MPPConnection alloc] initWithStart:335 end:418],
+ [[MPPConnection alloc] initWithStart:418 end:424],
+ [[MPPConnection alloc] initWithStart:434 end:364],
+ [[MPPConnection alloc] initWithStart:364 end:416],
+ [[MPPConnection alloc] initWithStart:416 end:434],
+ [[MPPConnection alloc] initWithStart:391 end:423],
+ [[MPPConnection alloc] initWithStart:423 end:327],
+ [[MPPConnection alloc] initWithStart:327 end:391],
+ [[MPPConnection alloc] initWithStart:301 end:251],
+ [[MPPConnection alloc] initWithStart:251 end:298],
+ [[MPPConnection alloc] initWithStart:298 end:301],
+ [[MPPConnection alloc] initWithStart:275 end:281],
+ [[MPPConnection alloc] initWithStart:281 end:4],
+ [[MPPConnection alloc] initWithStart:4 end:275],
+ [[MPPConnection alloc] initWithStart:254 end:373],
+ [[MPPConnection alloc] initWithStart:373 end:253],
+ [[MPPConnection alloc] initWithStart:253 end:254],
+ [[MPPConnection alloc] initWithStart:375 end:307],
+ [[MPPConnection alloc] initWithStart:307 end:321],
+ [[MPPConnection alloc] initWithStart:321 end:375],
+ [[MPPConnection alloc] initWithStart:280 end:425],
+ [[MPPConnection alloc] initWithStart:425 end:411],
+ [[MPPConnection alloc] initWithStart:411 end:280],
+ [[MPPConnection alloc] initWithStart:200 end:421],
+ [[MPPConnection alloc] initWithStart:421 end:18],
+ [[MPPConnection alloc] initWithStart:18 end:200],
+ [[MPPConnection alloc] initWithStart:335 end:321],
+ [[MPPConnection alloc] initWithStart:321 end:406],
+ [[MPPConnection alloc] initWithStart:406 end:335],
+ [[MPPConnection alloc] initWithStart:321 end:320],
+ [[MPPConnection alloc] initWithStart:320 end:405],
+ [[MPPConnection alloc] initWithStart:405 end:321],
+ [[MPPConnection alloc] initWithStart:314 end:315],
+ [[MPPConnection alloc] initWithStart:315 end:17],
+ [[MPPConnection alloc] initWithStart:17 end:314],
+ [[MPPConnection alloc] initWithStart:423 end:426],
+ [[MPPConnection alloc] initWithStart:426 end:266],
+ [[MPPConnection alloc] initWithStart:266 end:423],
+ [[MPPConnection alloc] initWithStart:396 end:377],
+ [[MPPConnection alloc] initWithStart:377 end:369],
+ [[MPPConnection alloc] initWithStart:369 end:396],
+ [[MPPConnection alloc] initWithStart:270 end:322],
+ [[MPPConnection alloc] initWithStart:322 end:269],
+ [[MPPConnection alloc] initWithStart:269 end:270],
+ [[MPPConnection alloc] initWithStart:413 end:417],
+ [[MPPConnection alloc] initWithStart:417 end:464],
+ [[MPPConnection alloc] initWithStart:464 end:413],
+ [[MPPConnection alloc] initWithStart:385 end:386],
+ [[MPPConnection alloc] initWithStart:386 end:258],
+ [[MPPConnection alloc] initWithStart:258 end:385],
+ [[MPPConnection alloc] initWithStart:248 end:456],
+ [[MPPConnection alloc] initWithStart:456 end:419],
+ [[MPPConnection alloc] initWithStart:419 end:248],
+ [[MPPConnection alloc] initWithStart:298 end:284],
+ [[MPPConnection alloc] initWithStart:284 end:333],
+ [[MPPConnection alloc] initWithStart:333 end:298],
+ [[MPPConnection alloc] initWithStart:168 end:417],
+ [[MPPConnection alloc] initWithStart:417 end:8],
+ [[MPPConnection alloc] initWithStart:8 end:168],
+ [[MPPConnection alloc] initWithStart:448 end:346],
+ [[MPPConnection alloc] initWithStart:346 end:261],
+ [[MPPConnection alloc] initWithStart:261 end:448],
+ [[MPPConnection alloc] initWithStart:417 end:413],
+ [[MPPConnection alloc] initWithStart:413 end:285],
+ [[MPPConnection alloc] initWithStart:285 end:417],
+ [[MPPConnection alloc] initWithStart:326 end:327],
+ [[MPPConnection alloc] initWithStart:327 end:328],
+ [[MPPConnection alloc] initWithStart:328 end:326],
+ [[MPPConnection alloc] initWithStart:277 end:355],
+ [[MPPConnection alloc] initWithStart:355 end:329],
+ [[MPPConnection alloc] initWithStart:329 end:277],
+ [[MPPConnection alloc] initWithStart:309 end:392],
+ [[MPPConnection alloc] initWithStart:392 end:438],
+ [[MPPConnection alloc] initWithStart:438 end:309],
+ [[MPPConnection alloc] initWithStart:381 end:382],
+ [[MPPConnection alloc] initWithStart:382 end:256],
+ [[MPPConnection alloc] initWithStart:256 end:381],
+ [[MPPConnection alloc] initWithStart:279 end:429],
+ [[MPPConnection alloc] initWithStart:429 end:360],
+ [[MPPConnection alloc] initWithStart:360 end:279],
+ [[MPPConnection alloc] initWithStart:365 end:364],
+ [[MPPConnection alloc] initWithStart:364 end:379],
+ [[MPPConnection alloc] initWithStart:379 end:365],
+ [[MPPConnection alloc] initWithStart:355 end:277],
+ [[MPPConnection alloc] initWithStart:277 end:437],
+ [[MPPConnection alloc] initWithStart:437 end:355],
+ [[MPPConnection alloc] initWithStart:282 end:443],
+ [[MPPConnection alloc] initWithStart:443 end:283],
+ [[MPPConnection alloc] initWithStart:283 end:282],
+ [[MPPConnection alloc] initWithStart:281 end:275],
+ [[MPPConnection alloc] initWithStart:275 end:363],
+ [[MPPConnection alloc] initWithStart:363 end:281],
+ [[MPPConnection alloc] initWithStart:395 end:431],
+ [[MPPConnection alloc] initWithStart:431 end:369],
+ [[MPPConnection alloc] initWithStart:369 end:395],
+ [[MPPConnection alloc] initWithStart:299 end:297],
+ [[MPPConnection alloc] initWithStart:297 end:337],
+ [[MPPConnection alloc] initWithStart:337 end:299],
+ [[MPPConnection alloc] initWithStart:335 end:273],
+ [[MPPConnection alloc] initWithStart:273 end:321],
+ [[MPPConnection alloc] initWithStart:321 end:335],
+ [[MPPConnection alloc] initWithStart:348 end:450],
+ [[MPPConnection alloc] initWithStart:450 end:349],
+ [[MPPConnection alloc] initWithStart:349 end:348],
+ [[MPPConnection alloc] initWithStart:359 end:446],
+ [[MPPConnection alloc] initWithStart:446 end:467],
+ [[MPPConnection alloc] initWithStart:467 end:359],
+ [[MPPConnection alloc] initWithStart:283 end:293],
+ [[MPPConnection alloc] initWithStart:293 end:282],
+ [[MPPConnection alloc] initWithStart:282 end:283],
+ [[MPPConnection alloc] initWithStart:250 end:458],
+ [[MPPConnection alloc] initWithStart:458 end:462],
+ [[MPPConnection alloc] initWithStart:462 end:250],
+ [[MPPConnection alloc] initWithStart:300 end:276],
+ [[MPPConnection alloc] initWithStart:276 end:383],
+ [[MPPConnection alloc] initWithStart:383 end:300],
+ [[MPPConnection alloc] initWithStart:292 end:308],
+ [[MPPConnection alloc] initWithStart:308 end:325],
+ [[MPPConnection alloc] initWithStart:325 end:292],
+ [[MPPConnection alloc] initWithStart:283 end:276],
+ [[MPPConnection alloc] initWithStart:276 end:293],
+ [[MPPConnection alloc] initWithStart:293 end:283],
+ [[MPPConnection alloc] initWithStart:264 end:372],
+ [[MPPConnection alloc] initWithStart:372 end:447],
+ [[MPPConnection alloc] initWithStart:447 end:264],
+ [[MPPConnection alloc] initWithStart:346 end:352],
+ [[MPPConnection alloc] initWithStart:352 end:340],
+ [[MPPConnection alloc] initWithStart:340 end:346],
+ 
[[MPPConnection alloc] initWithStart:354 end:274], + [[MPPConnection alloc] initWithStart:274 end:19], + [[MPPConnection alloc] initWithStart:19 end:354], + [[MPPConnection alloc] initWithStart:363 end:456], + [[MPPConnection alloc] initWithStart:456 end:281], + [[MPPConnection alloc] initWithStart:281 end:363], + [[MPPConnection alloc] initWithStart:426 end:436], + [[MPPConnection alloc] initWithStart:436 end:425], + [[MPPConnection alloc] initWithStart:425 end:426], + [[MPPConnection alloc] initWithStart:380 end:381], + [[MPPConnection alloc] initWithStart:381 end:252], + [[MPPConnection alloc] initWithStart:252 end:380], + [[MPPConnection alloc] initWithStart:267 end:269], + [[MPPConnection alloc] initWithStart:269 end:393], + [[MPPConnection alloc] initWithStart:393 end:267], + [[MPPConnection alloc] initWithStart:421 end:200], + [[MPPConnection alloc] initWithStart:200 end:428], + [[MPPConnection alloc] initWithStart:428 end:421], + [[MPPConnection alloc] initWithStart:371 end:266], + [[MPPConnection alloc] initWithStart:266 end:329], + [[MPPConnection alloc] initWithStart:329 end:371], + [[MPPConnection alloc] initWithStart:432 end:287], + [[MPPConnection alloc] initWithStart:287 end:422], + [[MPPConnection alloc] initWithStart:422 end:432], + [[MPPConnection alloc] initWithStart:290 end:250], + [[MPPConnection alloc] initWithStart:250 end:328], + [[MPPConnection alloc] initWithStart:328 end:290], + [[MPPConnection alloc] initWithStart:385 end:258], + [[MPPConnection alloc] initWithStart:258 end:384], + [[MPPConnection alloc] initWithStart:384 end:385], + [[MPPConnection alloc] initWithStart:446 end:265], + [[MPPConnection alloc] initWithStart:265 end:342], + [[MPPConnection alloc] initWithStart:342 end:446], + [[MPPConnection alloc] initWithStart:386 end:387], + [[MPPConnection alloc] initWithStart:387 end:257], + [[MPPConnection alloc] initWithStart:257 end:386], + [[MPPConnection alloc] initWithStart:422 end:424], + [[MPPConnection alloc] initWithStart:424 end:430], + [[MPPConnection alloc] initWithStart:430 end:422], + [[MPPConnection alloc] initWithStart:445 end:342], + [[MPPConnection alloc] initWithStart:342 end:276], + [[MPPConnection alloc] initWithStart:276 end:445], + [[MPPConnection alloc] initWithStart:422 end:273], + [[MPPConnection alloc] initWithStart:273 end:424], + [[MPPConnection alloc] initWithStart:424 end:422], + [[MPPConnection alloc] initWithStart:306 end:292], + [[MPPConnection alloc] initWithStart:292 end:307], + [[MPPConnection alloc] initWithStart:307 end:306], + [[MPPConnection alloc] initWithStart:352 end:366], + [[MPPConnection alloc] initWithStart:366 end:345], + [[MPPConnection alloc] initWithStart:345 end:352], + [[MPPConnection alloc] initWithStart:268 end:271], + [[MPPConnection alloc] initWithStart:271 end:302], + [[MPPConnection alloc] initWithStart:302 end:268], + [[MPPConnection alloc] initWithStart:358 end:423], + [[MPPConnection alloc] initWithStart:423 end:371], + [[MPPConnection alloc] initWithStart:371 end:358], + [[MPPConnection alloc] initWithStart:327 end:294], + [[MPPConnection alloc] initWithStart:294 end:460], + [[MPPConnection alloc] initWithStart:460 end:327], + [[MPPConnection alloc] initWithStart:331 end:279], + [[MPPConnection alloc] initWithStart:279 end:294], + [[MPPConnection alloc] initWithStart:294 end:331], + [[MPPConnection alloc] initWithStart:303 end:271], + [[MPPConnection alloc] initWithStart:271 end:304], + [[MPPConnection alloc] initWithStart:304 end:303], + [[MPPConnection alloc] initWithStart:436 end:432], + 
[[MPPConnection alloc] initWithStart:432 end:427], + [[MPPConnection alloc] initWithStart:427 end:436], + [[MPPConnection alloc] initWithStart:304 end:272], + [[MPPConnection alloc] initWithStart:272 end:408], + [[MPPConnection alloc] initWithStart:408 end:304], + [[MPPConnection alloc] initWithStart:395 end:394], + [[MPPConnection alloc] initWithStart:394 end:431], + [[MPPConnection alloc] initWithStart:431 end:395], + [[MPPConnection alloc] initWithStart:378 end:395], + [[MPPConnection alloc] initWithStart:395 end:400], + [[MPPConnection alloc] initWithStart:400 end:378], + [[MPPConnection alloc] initWithStart:296 end:334], + [[MPPConnection alloc] initWithStart:334 end:299], + [[MPPConnection alloc] initWithStart:299 end:296], + [[MPPConnection alloc] initWithStart:6 end:351], + [[MPPConnection alloc] initWithStart:351 end:168], + [[MPPConnection alloc] initWithStart:168 end:6], + [[MPPConnection alloc] initWithStart:376 end:352], + [[MPPConnection alloc] initWithStart:352 end:411], + [[MPPConnection alloc] initWithStart:411 end:376], + [[MPPConnection alloc] initWithStart:307 end:325], + [[MPPConnection alloc] initWithStart:325 end:320], + [[MPPConnection alloc] initWithStart:320 end:307], + [[MPPConnection alloc] initWithStart:285 end:295], + [[MPPConnection alloc] initWithStart:295 end:336], + [[MPPConnection alloc] initWithStart:336 end:285], + [[MPPConnection alloc] initWithStart:320 end:319], + [[MPPConnection alloc] initWithStart:319 end:404], + [[MPPConnection alloc] initWithStart:404 end:320], + [[MPPConnection alloc] initWithStart:329 end:330], + [[MPPConnection alloc] initWithStart:330 end:349], + [[MPPConnection alloc] initWithStart:349 end:329], + [[MPPConnection alloc] initWithStart:334 end:293], + [[MPPConnection alloc] initWithStart:293 end:333], + [[MPPConnection alloc] initWithStart:333 end:334], + [[MPPConnection alloc] initWithStart:366 end:323], + [[MPPConnection alloc] initWithStart:323 end:447], + [[MPPConnection alloc] initWithStart:447 end:366], + [[MPPConnection alloc] initWithStart:316 end:15], + [[MPPConnection alloc] initWithStart:15 end:315], + [[MPPConnection alloc] initWithStart:315 end:316], + [[MPPConnection alloc] initWithStart:331 end:358], + [[MPPConnection alloc] initWithStart:358 end:279], + [[MPPConnection alloc] initWithStart:279 end:331], + [[MPPConnection alloc] initWithStart:317 end:14], + [[MPPConnection alloc] initWithStart:14 end:316], + [[MPPConnection alloc] initWithStart:316 end:317], + [[MPPConnection alloc] initWithStart:8 end:285], + [[MPPConnection alloc] initWithStart:285 end:9], + [[MPPConnection alloc] initWithStart:9 end:8], + [[MPPConnection alloc] initWithStart:277 end:329], + [[MPPConnection alloc] initWithStart:329 end:350], + [[MPPConnection alloc] initWithStart:350 end:277], + [[MPPConnection alloc] initWithStart:253 end:374], + [[MPPConnection alloc] initWithStart:374 end:252], + [[MPPConnection alloc] initWithStart:252 end:253], + [[MPPConnection alloc] initWithStart:319 end:318], + [[MPPConnection alloc] initWithStart:318 end:403], + [[MPPConnection alloc] initWithStart:403 end:319], + [[MPPConnection alloc] initWithStart:351 end:6], + [[MPPConnection alloc] initWithStart:6 end:419], + [[MPPConnection alloc] initWithStart:419 end:351], + [[MPPConnection alloc] initWithStart:324 end:318], + [[MPPConnection alloc] initWithStart:318 end:325], + [[MPPConnection alloc] initWithStart:325 end:324], + [[MPPConnection alloc] initWithStart:397 end:367], + [[MPPConnection alloc] initWithStart:367 end:365], + [[MPPConnection alloc] 
initWithStart:365 end:397], + [[MPPConnection alloc] initWithStart:288 end:435], + [[MPPConnection alloc] initWithStart:435 end:397], + [[MPPConnection alloc] initWithStart:397 end:288], + [[MPPConnection alloc] initWithStart:278 end:344], + [[MPPConnection alloc] initWithStart:344 end:439], + [[MPPConnection alloc] initWithStart:439 end:278], + [[MPPConnection alloc] initWithStart:310 end:272], + [[MPPConnection alloc] initWithStart:272 end:311], + [[MPPConnection alloc] initWithStart:311 end:310], + [[MPPConnection alloc] initWithStart:248 end:195], + [[MPPConnection alloc] initWithStart:195 end:281], + [[MPPConnection alloc] initWithStart:281 end:248], + [[MPPConnection alloc] initWithStart:375 end:273], + [[MPPConnection alloc] initWithStart:273 end:291], + [[MPPConnection alloc] initWithStart:291 end:375], + [[MPPConnection alloc] initWithStart:175 end:396], + [[MPPConnection alloc] initWithStart:396 end:199], + [[MPPConnection alloc] initWithStart:199 end:175], + [[MPPConnection alloc] initWithStart:312 end:311], + [[MPPConnection alloc] initWithStart:311 end:268], + [[MPPConnection alloc] initWithStart:268 end:312], + [[MPPConnection alloc] initWithStart:276 end:283], + [[MPPConnection alloc] initWithStart:283 end:445], + [[MPPConnection alloc] initWithStart:445 end:276], + [[MPPConnection alloc] initWithStart:390 end:373], + [[MPPConnection alloc] initWithStart:373 end:339], + [[MPPConnection alloc] initWithStart:339 end:390], + [[MPPConnection alloc] initWithStart:295 end:282], + [[MPPConnection alloc] initWithStart:282 end:296], + [[MPPConnection alloc] initWithStart:296 end:295], + [[MPPConnection alloc] initWithStart:448 end:449], + [[MPPConnection alloc] initWithStart:449 end:346], + [[MPPConnection alloc] initWithStart:346 end:448], + [[MPPConnection alloc] initWithStart:356 end:264], + [[MPPConnection alloc] initWithStart:264 end:454], + [[MPPConnection alloc] initWithStart:454 end:356], + [[MPPConnection alloc] initWithStart:337 end:336], + [[MPPConnection alloc] initWithStart:336 end:299], + [[MPPConnection alloc] initWithStart:299 end:337], + [[MPPConnection alloc] initWithStart:337 end:338], + [[MPPConnection alloc] initWithStart:338 end:151], + [[MPPConnection alloc] initWithStart:151 end:337], + [[MPPConnection alloc] initWithStart:294 end:278], + [[MPPConnection alloc] initWithStart:278 end:455], + [[MPPConnection alloc] initWithStart:455 end:294], + [[MPPConnection alloc] initWithStart:308 end:292], + [[MPPConnection alloc] initWithStart:292 end:415], + [[MPPConnection alloc] initWithStart:415 end:308], + [[MPPConnection alloc] initWithStart:429 end:358], + [[MPPConnection alloc] initWithStart:358 end:355], + [[MPPConnection alloc] initWithStart:355 end:429], + [[MPPConnection alloc] initWithStart:265 end:340], + [[MPPConnection alloc] initWithStart:340 end:372], + [[MPPConnection alloc] initWithStart:372 end:265], + [[MPPConnection alloc] initWithStart:352 end:346], + [[MPPConnection alloc] initWithStart:346 end:280], + [[MPPConnection alloc] initWithStart:280 end:352], + [[MPPConnection alloc] initWithStart:295 end:442], + [[MPPConnection alloc] initWithStart:442 end:282], + [[MPPConnection alloc] initWithStart:282 end:295], + [[MPPConnection alloc] initWithStart:354 end:19], + [[MPPConnection alloc] initWithStart:19 end:370], + [[MPPConnection alloc] initWithStart:370 end:354], + [[MPPConnection alloc] initWithStart:285 end:441], + [[MPPConnection alloc] initWithStart:441 end:295], + [[MPPConnection alloc] initWithStart:295 end:285], + [[MPPConnection alloc] 
initWithStart:195 end:248], + [[MPPConnection alloc] initWithStart:248 end:197], + [[MPPConnection alloc] initWithStart:197 end:195], + [[MPPConnection alloc] initWithStart:457 end:440], + [[MPPConnection alloc] initWithStart:440 end:274], + [[MPPConnection alloc] initWithStart:274 end:457], + [[MPPConnection alloc] initWithStart:301 end:300], + [[MPPConnection alloc] initWithStart:300 end:368], + [[MPPConnection alloc] initWithStart:368 end:301], + [[MPPConnection alloc] initWithStart:417 end:351], + [[MPPConnection alloc] initWithStart:351 end:465], + [[MPPConnection alloc] initWithStart:465 end:417], + [[MPPConnection alloc] initWithStart:251 end:301], + [[MPPConnection alloc] initWithStart:301 end:389], + [[MPPConnection alloc] initWithStart:389 end:251], + [[MPPConnection alloc] initWithStart:394 end:395], + [[MPPConnection alloc] initWithStart:395 end:379], + [[MPPConnection alloc] initWithStart:379 end:394], + [[MPPConnection alloc] initWithStart:399 end:412], + [[MPPConnection alloc] initWithStart:412 end:419], + [[MPPConnection alloc] initWithStart:419 end:399], + [[MPPConnection alloc] initWithStart:410 end:436], + [[MPPConnection alloc] initWithStart:436 end:322], + [[MPPConnection alloc] initWithStart:322 end:410], + [[MPPConnection alloc] initWithStart:326 end:2], + [[MPPConnection alloc] initWithStart:2 end:393], + [[MPPConnection alloc] initWithStart:393 end:326], + [[MPPConnection alloc] initWithStart:354 end:370], + [[MPPConnection alloc] initWithStart:370 end:461], + [[MPPConnection alloc] initWithStart:461 end:354], + [[MPPConnection alloc] initWithStart:393 end:164], + [[MPPConnection alloc] initWithStart:164 end:267], + [[MPPConnection alloc] initWithStart:267 end:393], + [[MPPConnection alloc] initWithStart:268 end:302], + [[MPPConnection alloc] initWithStart:302 end:12], + [[MPPConnection alloc] initWithStart:12 end:268], + [[MPPConnection alloc] initWithStart:312 end:268], + [[MPPConnection alloc] initWithStart:268 end:13], + [[MPPConnection alloc] initWithStart:13 end:312], + [[MPPConnection alloc] initWithStart:298 end:293], + [[MPPConnection alloc] initWithStart:293 end:301], + [[MPPConnection alloc] initWithStart:301 end:298], + [[MPPConnection alloc] initWithStart:265 end:446], + [[MPPConnection alloc] initWithStart:446 end:340], + [[MPPConnection alloc] initWithStart:340 end:265], + [[MPPConnection alloc] initWithStart:280 end:330], + [[MPPConnection alloc] initWithStart:330 end:425], + [[MPPConnection alloc] initWithStart:425 end:280], + [[MPPConnection alloc] initWithStart:322 end:426], + [[MPPConnection alloc] initWithStart:426 end:391], + [[MPPConnection alloc] initWithStart:391 end:322], + [[MPPConnection alloc] initWithStart:420 end:429], + [[MPPConnection alloc] initWithStart:429 end:437], + [[MPPConnection alloc] initWithStart:437 end:420], + [[MPPConnection alloc] initWithStart:393 end:391], + [[MPPConnection alloc] initWithStart:391 end:326], + [[MPPConnection alloc] initWithStart:326 end:393], + [[MPPConnection alloc] initWithStart:344 end:440], + [[MPPConnection alloc] initWithStart:440 end:438], + [[MPPConnection alloc] initWithStart:438 end:344], + [[MPPConnection alloc] initWithStart:458 end:459], + [[MPPConnection alloc] initWithStart:459 end:461], + [[MPPConnection alloc] initWithStart:461 end:458], + [[MPPConnection alloc] initWithStart:364 end:434], + [[MPPConnection alloc] initWithStart:434 end:394], + [[MPPConnection alloc] initWithStart:394 end:364], + [[MPPConnection alloc] initWithStart:428 end:396], + [[MPPConnection alloc] 
initWithStart:396 end:262], + [[MPPConnection alloc] initWithStart:262 end:428], + [[MPPConnection alloc] initWithStart:274 end:354], + [[MPPConnection alloc] initWithStart:354 end:457], + [[MPPConnection alloc] initWithStart:457 end:274], + [[MPPConnection alloc] initWithStart:317 end:316], + [[MPPConnection alloc] initWithStart:316 end:402], + [[MPPConnection alloc] initWithStart:402 end:317], + [[MPPConnection alloc] initWithStart:316 end:315], + [[MPPConnection alloc] initWithStart:315 end:403], + [[MPPConnection alloc] initWithStart:403 end:316], + [[MPPConnection alloc] initWithStart:315 end:314], + [[MPPConnection alloc] initWithStart:314 end:404], + [[MPPConnection alloc] initWithStart:404 end:315], + [[MPPConnection alloc] initWithStart:314 end:313], + [[MPPConnection alloc] initWithStart:313 end:405], + [[MPPConnection alloc] initWithStart:405 end:314], + [[MPPConnection alloc] initWithStart:313 end:421], + [[MPPConnection alloc] initWithStart:421 end:406], + [[MPPConnection alloc] initWithStart:406 end:313], + [[MPPConnection alloc] initWithStart:323 end:366], + [[MPPConnection alloc] initWithStart:366 end:361], + [[MPPConnection alloc] initWithStart:361 end:323], + [[MPPConnection alloc] initWithStart:292 end:306], + [[MPPConnection alloc] initWithStart:306 end:407], + [[MPPConnection alloc] initWithStart:407 end:292], + [[MPPConnection alloc] initWithStart:306 end:291], + [[MPPConnection alloc] initWithStart:291 end:408], + [[MPPConnection alloc] initWithStart:408 end:306], + [[MPPConnection alloc] initWithStart:291 end:287], + [[MPPConnection alloc] initWithStart:287 end:409], + [[MPPConnection alloc] initWithStart:409 end:291], + [[MPPConnection alloc] initWithStart:287 end:432], + [[MPPConnection alloc] initWithStart:432 end:410], + [[MPPConnection alloc] initWithStart:410 end:287], + [[MPPConnection alloc] initWithStart:427 end:434], + [[MPPConnection alloc] initWithStart:434 end:411], + [[MPPConnection alloc] initWithStart:411 end:427], + [[MPPConnection alloc] initWithStart:372 end:264], + [[MPPConnection alloc] initWithStart:264 end:383], + [[MPPConnection alloc] initWithStart:383 end:372], + [[MPPConnection alloc] initWithStart:459 end:309], + [[MPPConnection alloc] initWithStart:309 end:457], + [[MPPConnection alloc] initWithStart:457 end:459], + [[MPPConnection alloc] initWithStart:366 end:352], + [[MPPConnection alloc] initWithStart:352 end:401], + [[MPPConnection alloc] initWithStart:401 end:366], + [[MPPConnection alloc] initWithStart:1 end:274], + [[MPPConnection alloc] initWithStart:274 end:4], + [[MPPConnection alloc] initWithStart:4 end:1], + [[MPPConnection alloc] initWithStart:418 end:421], + [[MPPConnection alloc] initWithStart:421 end:262], + [[MPPConnection alloc] initWithStart:262 end:418], + [[MPPConnection alloc] initWithStart:331 end:294], + [[MPPConnection alloc] initWithStart:294 end:358], + [[MPPConnection alloc] initWithStart:358 end:331], + [[MPPConnection alloc] initWithStart:435 end:433], + [[MPPConnection alloc] initWithStart:433 end:367], + [[MPPConnection alloc] initWithStart:367 end:435], + [[MPPConnection alloc] initWithStart:392 end:289], + [[MPPConnection alloc] initWithStart:289 end:439], + [[MPPConnection alloc] initWithStart:439 end:392], + [[MPPConnection alloc] initWithStart:328 end:462], + [[MPPConnection alloc] initWithStart:462 end:326], + [[MPPConnection alloc] initWithStart:326 end:328], + [[MPPConnection alloc] initWithStart:94 end:2], + [[MPPConnection alloc] initWithStart:2 end:370], + [[MPPConnection alloc] 
initWithStart:370 end:94], + [[MPPConnection alloc] initWithStart:289 end:305], + [[MPPConnection alloc] initWithStart:305 end:455], + [[MPPConnection alloc] initWithStart:455 end:289], + [[MPPConnection alloc] initWithStart:339 end:254], + [[MPPConnection alloc] initWithStart:254 end:448], + [[MPPConnection alloc] initWithStart:448 end:339], + [[MPPConnection alloc] initWithStart:359 end:255], + [[MPPConnection alloc] initWithStart:255 end:446], + [[MPPConnection alloc] initWithStart:446 end:359], + [[MPPConnection alloc] initWithStart:254 end:253], + [[MPPConnection alloc] initWithStart:253 end:449], + [[MPPConnection alloc] initWithStart:449 end:254], + [[MPPConnection alloc] initWithStart:253 end:252], + [[MPPConnection alloc] initWithStart:252 end:450], + [[MPPConnection alloc] initWithStart:450 end:253], + [[MPPConnection alloc] initWithStart:252 end:256], + [[MPPConnection alloc] initWithStart:256 end:451], + [[MPPConnection alloc] initWithStart:451 end:252], + [[MPPConnection alloc] initWithStart:256 end:341], + [[MPPConnection alloc] initWithStart:341 end:452], + [[MPPConnection alloc] initWithStart:452 end:256], + [[MPPConnection alloc] initWithStart:414 end:413], + [[MPPConnection alloc] initWithStart:413 end:463], + [[MPPConnection alloc] initWithStart:463 end:414], + [[MPPConnection alloc] initWithStart:286 end:441], + [[MPPConnection alloc] initWithStart:441 end:414], + [[MPPConnection alloc] initWithStart:414 end:286], + [[MPPConnection alloc] initWithStart:286 end:258], + [[MPPConnection alloc] initWithStart:258 end:441], + [[MPPConnection alloc] initWithStart:441 end:286], + [[MPPConnection alloc] initWithStart:258 end:257], + [[MPPConnection alloc] initWithStart:257 end:442], + [[MPPConnection alloc] initWithStart:442 end:258], + [[MPPConnection alloc] initWithStart:257 end:259], + [[MPPConnection alloc] initWithStart:259 end:443], + [[MPPConnection alloc] initWithStart:443 end:257], + [[MPPConnection alloc] initWithStart:259 end:260], + [[MPPConnection alloc] initWithStart:260 end:444], + [[MPPConnection alloc] initWithStart:444 end:259], + [[MPPConnection alloc] initWithStart:260 end:467], + [[MPPConnection alloc] initWithStart:467 end:445], + [[MPPConnection alloc] initWithStart:445 end:260], + [[MPPConnection alloc] initWithStart:309 end:459], + [[MPPConnection alloc] initWithStart:459 end:250], + [[MPPConnection alloc] initWithStart:250 end:309], + [[MPPConnection alloc] initWithStart:305 end:289], + [[MPPConnection alloc] initWithStart:289 end:290], + [[MPPConnection alloc] initWithStart:290 end:305], + [[MPPConnection alloc] initWithStart:305 end:290], + [[MPPConnection alloc] initWithStart:290 end:460], + [[MPPConnection alloc] initWithStart:460 end:305], + [[MPPConnection alloc] initWithStart:401 end:376], + [[MPPConnection alloc] initWithStart:376 end:435], + [[MPPConnection alloc] initWithStart:435 end:401], + [[MPPConnection alloc] initWithStart:309 end:250], + [[MPPConnection alloc] initWithStart:250 end:392], + [[MPPConnection alloc] initWithStart:392 end:309], + [[MPPConnection alloc] initWithStart:376 end:411], + [[MPPConnection alloc] initWithStart:411 end:433], + [[MPPConnection alloc] initWithStart:433 end:376], + [[MPPConnection alloc] initWithStart:453 end:341], + [[MPPConnection alloc] initWithStart:341 end:464], + [[MPPConnection alloc] initWithStart:464 end:453], + [[MPPConnection alloc] initWithStart:357 end:453], + [[MPPConnection alloc] initWithStart:453 end:465], + [[MPPConnection alloc] initWithStart:465 end:357], + [[MPPConnection alloc] 
initWithStart:343 end:357], + [[MPPConnection alloc] initWithStart:357 end:412], + [[MPPConnection alloc] initWithStart:412 end:343], + [[MPPConnection alloc] initWithStart:437 end:343], + [[MPPConnection alloc] initWithStart:343 end:399], + [[MPPConnection alloc] initWithStart:399 end:437], + [[MPPConnection alloc] initWithStart:344 end:360], + [[MPPConnection alloc] initWithStart:360 end:440], + [[MPPConnection alloc] initWithStart:440 end:344], + [[MPPConnection alloc] initWithStart:420 end:437], + [[MPPConnection alloc] initWithStart:437 end:456], + [[MPPConnection alloc] initWithStart:456 end:420], + [[MPPConnection alloc] initWithStart:360 end:420], + [[MPPConnection alloc] initWithStart:420 end:363], + [[MPPConnection alloc] initWithStart:363 end:360], + [[MPPConnection alloc] initWithStart:361 end:401], + [[MPPConnection alloc] initWithStart:401 end:288], + [[MPPConnection alloc] initWithStart:288 end:361], + [[MPPConnection alloc] initWithStart:265 end:372], + [[MPPConnection alloc] initWithStart:372 end:353], + [[MPPConnection alloc] initWithStart:353 end:265], + [[MPPConnection alloc] initWithStart:390 end:339], + [[MPPConnection alloc] initWithStart:339 end:249], + [[MPPConnection alloc] initWithStart:249 end:390], + [[MPPConnection alloc] initWithStart:339 end:448], + [[MPPConnection alloc] initWithStart:448 end:255], + [[MPPConnection alloc] initWithStart:255 end:339] +]; + +NSArray *const MPPFaceConnections = [[[NSArray arrayWithArray:MPPFaceLandmarksLips] + arrayByAddingObjectsFromArray:MPPFaceLandmarksContours] + arrayByAddingObjectsFromArray:MPPFaceLandmarksTesselation]; + +NS_ASSUME_NONNULL_END From 53f0736bf08667c291b51ddcc2e3cf41b3a2e25a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 9 Jun 2023 13:40:14 -0700 Subject: [PATCH 039/106] Add an option to disable explicit CPU sync for ExternalTextureRenderer PiperOrigin-RevId: 539166965 --- .../glutil/ExternalTextureRenderer.java | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/mediapipe/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java b/mediapipe/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java index 4dd35f865..381864484 100644 --- a/mediapipe/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java +++ b/mediapipe/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java @@ -67,6 +67,7 @@ public class ExternalTextureRenderer { private float[] textureTransformMatrix = new float[16]; private boolean flipY; private int rotation = Surface.ROTATION_0; + private boolean doExplicitCpuSync = true; /** Call this to setup the shader program before rendering. */ public void setup() { @@ -101,6 +102,14 @@ public class ExternalTextureRenderer { this.rotation = rotation; } + /** + * Configures whether the renderer should do an explicit CPU synchronization using glFinish upon + * each {@link #render} call. Defaults to true. + */ + public void setDoExplicitCpuSync(boolean doExplicitCpuSync) { + this.doExplicitCpuSync = doExplicitCpuSync; + } + /** * Renders the surfaceTexture to the framebuffer with optional vertical flip. 
* @@ -150,8 +159,11 @@ public class ExternalTextureRenderer { GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); ShaderUtil.checkGlError("glBindTexture"); - // TODO: add sync and go back to glFlush() - GLES20.glFinish(); + if (doExplicitCpuSync) { + + // TODO: add sync and go back to glFlush() + GLES20.glFinish(); + } } /** From 1d4a205c2e357e9152616c5d95f0f20b4c712c29 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 9 Jun 2023 18:04:35 -0700 Subject: [PATCH 040/106] Internal change PiperOrigin-RevId: 539220863 --- mediapipe/framework/calculator_graph.cc | 4 ++++ mediapipe/framework/calculator_graph.h | 3 +++ mediapipe/framework/scheduler.cc | 4 ++-- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index b9fc4c965..2a2088c6b 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -839,6 +839,10 @@ absl::Status CalculatorGraph::PrepareForRun( } absl::Status CalculatorGraph::WaitUntilIdle() { + if (has_sources_) { + LOG(WARNING) << "WaitUntilIdle called on a graph with source nodes, which " + "is not fully supported at the moment."; + } MP_RETURN_IF_ERROR(scheduler_.WaitUntilIdle()); VLOG(2) << "Scheduler idle."; absl::Status status = absl::OkStatus(); diff --git a/mediapipe/framework/calculator_graph.h b/mediapipe/framework/calculator_graph.h index 354694e39..748d2fb32 100644 --- a/mediapipe/framework/calculator_graph.h +++ b/mediapipe/framework/calculator_graph.h @@ -229,8 +229,11 @@ class CalculatorGraph { // Wait until the running graph is in the idle mode, which is when nothing can // be scheduled and nothing is running in the worker threads. This function // can be called only after StartRun(). + // // NOTE: The graph must not have any source nodes because source nodes prevent // the running graph from becoming idle until the source nodes are done. + // Currently, `WaitUntilIdle` cannot be used reliably on graphs with any + // source nodes. absl::Status WaitUntilIdle(); // Wait until a packet is emitted on one of the observed output streams. diff --git a/mediapipe/framework/scheduler.cc b/mediapipe/framework/scheduler.cc index 854c10fd5..ceadce787 100644 --- a/mediapipe/framework/scheduler.cc +++ b/mediapipe/framework/scheduler.cc @@ -273,8 +273,8 @@ absl::Status Scheduler::WaitForObservedOutput() { // Idleness requires: // 1. either the graph has no source nodes or all source nodes are closed, and // 2. no packets are added to graph input streams. -// For simplicity, we only allow WaitUntilIdle() to be called on a graph with -// no source nodes. (This is enforced by CalculatorGraph::WaitUntilIdle().) +// For simplicity, we only fully support WaitUntilIdle() to be called on a graph +// with no source nodes. // The application must ensure no other threads are adding packets to graph // input streams while a WaitUntilIdle() call is in progress. 
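// For example, the reliably supported pattern is the following minimal
// sketch (the stream name "in" and the contents of `config` are illustrative
// assumptions; the config contains no source nodes):
//
//   CalculatorGraph graph;
//   MP_RETURN_IF_ERROR(graph.Initialize(config));
//   MP_RETURN_IF_ERROR(graph.StartRun({}));
//   MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
//       "in", MakePacket<int>(1).At(Timestamp(0))));
//   // No other thread adds packets to "in" past this point.
//   MP_RETURN_IF_ERROR(graph.WaitUntilIdle());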
 absl::Status Scheduler::WaitUntilIdle() {

From ac4f60a79385a78ef5af2b8505ee653862bd1c8c Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Fri, 9 Jun 2023 21:10:56 -0700
Subject: [PATCH 041/106] Annotate in model input scale for InteractiveSegmenter

PiperOrigin-RevId: 539245617
---
 .../cc/vision/interactive_segmenter/BUILD     |  2 +
 .../interactive_segmenter_graph.cc            | 64 ++++++++++++++++++-
 .../interactive_segmenter_test.cc             | 47 +++++++++++---
 mediapipe/tasks/testdata/vision/BUILD         |  8 +++
 third_party/external_files.bzl                | 28 +++++++-
 5 files changed, 136 insertions(+), 13 deletions(-)

diff --git a/mediapipe/tasks/cc/vision/interactive_segmenter/BUILD b/mediapipe/tasks/cc/vision/interactive_segmenter/BUILD
index d02b5db36..177cbf43a 100644
--- a/mediapipe/tasks/cc/vision/interactive_segmenter/BUILD
+++ b/mediapipe/tasks/cc/vision/interactive_segmenter/BUILD
@@ -52,6 +52,7 @@ cc_library(
     name = "interactive_segmenter_graph",
     srcs = ["interactive_segmenter_graph.cc"],
     deps = [
+        "//mediapipe/calculators/image:image_transformation_calculator",
         "//mediapipe/calculators/image:set_alpha_calculator",
         "//mediapipe/calculators/util:annotation_overlay_calculator",
         "//mediapipe/calculators/util:flat_color_image_calculator",
@@ -60,6 +61,7 @@ cc_library(
         "//mediapipe/calculators/util:to_image_calculator",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/api2:builder",
+        "//mediapipe/framework/api2:node",
         "//mediapipe/framework/api2:port",
         "//mediapipe/framework/formats:image",
         "//mediapipe/framework/formats:rect_cc_proto",

diff --git a/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_graph.cc b/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_graph.cc
index 5bb3e8ece..5ae2792fe 100644
--- a/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_graph.cc
+++ b/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_graph.cc
@@ -13,12 +13,14 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
+#include <algorithm>
 #include "absl/status/statusor.h"
 #include "absl/strings/string_view.h"
 #include "mediapipe/calculators/util/flat_color_image_calculator.pb.h"
 #include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/api2/port.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/image.h"
@@ -35,6 +37,51 @@ namespace mediapipe {
 namespace tasks {
 namespace vision {
 namespace interactive_segmenter {
+namespace internal {
+
+// A calculator to add thickness to the render data according to the image size,
+// so that the render data is scale invariant to the image size. If the render
+// data already has thickness, it will be kept as is.
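+// For example (an illustrative calculation, assuming the 512x512 model input
+// tensor defined below): a 2048x1536 input image yields
+//   thickness = max(max(2048 / 512.0, 1536 / 512.0), 1.0) = 4.0
+// for annotations that carry no thickness, while an image no larger than
+// 512x512 keeps the minimum thickness of 1.0.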
+class AddThicknessToRenderDataCalculator : public api2::Node {
+ public:
+  static constexpr api2::Input<Image> kImageIn{"IMAGE"};
+  static constexpr api2::Input<mediapipe::RenderData> kRenderDataIn{
+      "RENDER_DATA"};
+  static constexpr api2::Output<mediapipe::RenderData> kRenderDataOut{
+      "RENDER_DATA"};
+
+  static constexpr int kModelInputTensorWidth = 512;
+  static constexpr int kModelInputTensorHeight = 512;
+
+  MEDIAPIPE_NODE_CONTRACT(kImageIn, kRenderDataIn, kRenderDataOut);
+
+  absl::Status Process(CalculatorContext* cc) final {
+    mediapipe::RenderData render_data = kRenderDataIn(cc).Get();
+    Image image = kImageIn(cc).Get();
+    double thickness = std::max(
+        std::max(image.width() / static_cast<double>(kModelInputTensorWidth),
+                 image.height() / static_cast<double>(kModelInputTensorHeight)),
+        1.0);
+
+    for (auto& annotation : *render_data.mutable_render_annotations()) {
+      if (!annotation.has_thickness()) {
+        annotation.set_thickness(thickness);
+      }
+    }
+    kRenderDataOut(cc).Send(render_data);
+    return absl::OkStatus();
+  }
+};
+
+// NOLINTBEGIN: Node registration doesn't work when part of calculator name is
+// moved to next line.
+// clang-format off
+MEDIAPIPE_REGISTER_NODE(
+    ::mediapipe::tasks::vision::interactive_segmenter::internal::AddThicknessToRenderDataCalculator);
+// clang-format on
+// NOLINTEND
+
+}  // namespace internal
 
 namespace {
 
@@ -59,6 +106,7 @@ constexpr absl::string_view kAlphaGpuTag{"ALPHA_GPU"};
 constexpr absl::string_view kNormRectTag{"NORM_RECT"};
 constexpr absl::string_view kRoiTag{"ROI"};
 constexpr absl::string_view kQualityScoresTag{"QUALITY_SCORES"};
+constexpr absl::string_view kRenderDataTag{"RENDER_DATA"};
 
 // Updates the graph to return `roi` stream which has same dimension as
 // `image`, and rendered with `roi`. If `use_gpu` is true, returned `Source` is
@@ -69,14 +117,23 @@ Source<> RoiToAlpha(Source<Image> image, Source<RenderData> roi, bool use_gpu,
   const absl::string_view image_tag_with_suffix =
       use_gpu ? kImageGpuTag : kImageCpuTag;
 
+  // Adds thickness to the render data so that the render data is scale
+  // invariant to the input image size.
+  auto& add_thickness = graph.AddNode(
+      "mediapipe::tasks::vision::interactive_segmenter::internal::"
+      "AddThicknessToRenderDataCalculator");
+  image >> add_thickness.In(kImageTag);
+  roi >> add_thickness.In(kRenderDataTag);
+  auto roi_with_thickness = add_thickness.Out(kRenderDataTag);
+
   // Generates a blank canvas with same size as input image.
   auto& flat_color = graph.AddNode("FlatColorImageCalculator");
   auto& flat_color_options =
       flat_color.GetOptions<FlatColorImageCalculatorOptions>();
   // SetAlphaCalculator only takes 1st channel.
   flat_color_options.mutable_color()->set_r(0);
-  image >> flat_color.In(kImageTag)[0];
-  auto blank_canvas = flat_color.Out(kImageTag)[0];
+  image >> flat_color.In(kImageTag);
+  auto blank_canvas = flat_color.Out(kImageTag);
 
   auto& from_mp_image = graph.AddNode("FromImageCalculator");
   blank_canvas >> from_mp_image.In(kImageTag);
@@ -85,7 +142,7 @@ Source<> RoiToAlpha(Source<Image> image, Source<RenderData> roi, bool use_gpu,
 
   auto& roi_to_alpha = graph.AddNode("AnnotationOverlayCalculator");
   blank_canvas_in_cpu_or_gpu >>
       roi_to_alpha.In(use_gpu ? kImageGpuTag : kImageTag);
-  roi >> roi_to_alpha.In(0);
+  roi_with_thickness >> roi_to_alpha.In(0);
   auto alpha = roi_to_alpha.Out(use_gpu ? kImageGpuTag : kImageTag);
 
   return alpha;
@@ -163,6 +220,7 @@ class InteractiveSegmenterGraph : public core::ModelTaskGraph {
     image >> from_mp_image.In(kImageTag);
     auto image_in_cpu_or_gpu = from_mp_image.Out(image_tag_with_suffix);
 
+    // Creates an RGBA image with model input tensor size.
     auto alpha_in_cpu_or_gpu = RoiToAlpha(image, roi, use_gpu, graph);
 
     auto& set_alpha = graph.AddNode("SetAlphaCalculator");

diff --git a/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_test.cc b/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_test.cc
index 16d065f61..2bb06428e 100644
--- a/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_test.cc
+++ b/mediapipe/tasks/cc/vision/interactive_segmenter/interactive_segmenter_test.cc
@@ -34,6 +34,7 @@ limitations under the License.
 #include "mediapipe/framework/port/opencv_core_inc.h"
 #include "mediapipe/framework/port/opencv_imgcodecs_inc.h"
 #include "mediapipe/framework/port/status_matchers.h"
+#include "mediapipe/framework/tool/test_util.h"
 #include "mediapipe/tasks/cc/components/containers/keypoint.h"
 #include "mediapipe/tasks/cc/components/containers/rect.h"
 #include "mediapipe/tasks/cc/core/proto/base_options.pb.h"
@@ -70,6 +71,10 @@ constexpr absl::string_view kCatsAndDogsJpg{"cats_and_dogs.jpg"};
 // Golden mask for the dogs in cats_and_dogs.jpg.
 constexpr absl::string_view kCatsAndDogsMaskDog1{"cats_and_dogs_mask_dog1.png"};
 constexpr absl::string_view kCatsAndDogsMaskDog2{"cats_and_dogs_mask_dog2.png"};
+constexpr absl::string_view kPenguinsLarge{"penguins_large.jpg"};
+constexpr absl::string_view kPenguinsSmall{"penguins_small.jpg"};
+constexpr absl::string_view kPenguinsSmallMask{"penguins_small_mask.png"};
+constexpr absl::string_view kPenguinsLargeMask{"penguins_large_mask.png"};
 
 constexpr float kGoldenMaskSimilarity = 0.97;
 
@@ -183,6 +188,7 @@ struct InteractiveSegmenterTestParams {
   std::string test_name;
   RegionOfInterest::Format format;
   std::variant<NormalizedKeypoint, std::vector<NormalizedKeypoint>> roi;
+  absl::string_view input_image_file;
   absl::string_view golden_mask_file;
   float similarity_threshold;
 };
@@ -220,8 +226,8 @@ TEST_P(SucceedSegmentationWithRoi, SucceedsWithCategoryMask) {
   const InteractiveSegmenterTestParams& params = GetParam();
 
   MP_ASSERT_OK_AND_ASSIGN(
-      Image image,
-      DecodeImageFromFile(JoinPath("./", kTestDataDirectory, kCatsAndDogsJpg)));
+      Image image, DecodeImageFromFile(JoinPath("./", kTestDataDirectory,
+                                                params.input_image_file)));
   auto options = std::make_unique<InteractiveSegmenterOptions>();
   options->base_options.model_asset_path =
       JoinPath("./", kTestDataDirectory, kPtmModel);
@@ -244,6 +250,15 @@ TEST_P(SucceedSegmentationWithRoi, SucceedsWithCategoryMask) {
   EXPECT_THAT(actual_mask,
               SimilarToUint8Mask(expected_mask, params.similarity_threshold,
                                  kGoldenMaskMagnificationFactor));
+
+  cv::Mat visualized_mask;
+  actual_mask.convertTo(visualized_mask, CV_8UC1, /*alpha=*/255);
+  ImageFrame visualized_image(mediapipe::ImageFormat::GRAY8,
+                              visualized_mask.cols, visualized_mask.rows,
+                              visualized_mask.step, visualized_mask.data,
+                              [visualized_mask](uint8_t[]) {});
+  MP_EXPECT_OK(SavePngTestOutput(
+      visualized_image, absl::StrFormat("%s_category_mask", params.test_name)));
 }
 
 TEST_P(SucceedSegmentationWithRoi, SucceedsWithConfidenceMask) {
   const InteractiveSegmenterTestParams& params = GetParam();
 
   MP_ASSERT_OK_AND_ASSIGN(
-      Image image,
-      DecodeImageFromFile(JoinPath("./", kTestDataDirectory, kCatsAndDogsJpg)));
+      Image image, DecodeImageFromFile(JoinPath("./", kTestDataDirectory,
+                                                params.input_image_file)));
   auto options = std::make_unique<InteractiveSegmenterOptions>();
   options->base_options.model_asset_path =
       JoinPath("./", kTestDataDirectory, kPtmModel);
@@ -275,6 +290,15 @@
       result.confidence_masks->at(1).GetImageFrameSharedPtr().get());
 
   EXPECT_THAT(actual_mask,
               SimilarToFloatMask(expected_mask_float, params.similarity_threshold));
+  cv::Mat visualized_mask;
+  actual_mask.convertTo(visualized_mask, CV_8UC1, /*alpha=*/255);
+  ImageFrame visualized_image(mediapipe::ImageFormat::GRAY8,
+                              visualized_mask.cols, visualized_mask.rows,
+                              visualized_mask.step, visualized_mask.data,
+                              [visualized_mask](uint8_t[]) {});
+  MP_EXPECT_OK(SavePngTestOutput(
+      visualized_image,
+      absl::StrFormat("%s_confidence_mask", params.test_name)));
 }
 
 INSTANTIATE_TEST_SUITE_P(
     SucceedSegmentationWithRoiTest, SucceedSegmentationWithRoi,
     ::testing::ValuesIn(
         {// Keypoint input.
          {"PointToDog1", RegionOfInterest::Format::kKeyPoint,
-          NormalizedKeypoint{0.44, 0.70}, kCatsAndDogsMaskDog1, 0.84f},
+          NormalizedKeypoint{0.44, 0.70}, kCatsAndDogsJpg, kCatsAndDogsMaskDog1,
+          0.84f},
          {"PointToDog2", RegionOfInterest::Format::kKeyPoint,
-          NormalizedKeypoint{0.66, 0.66}, kCatsAndDogsMaskDog2,
+          NormalizedKeypoint{0.66, 0.66}, kCatsAndDogsJpg, kCatsAndDogsMaskDog2,
           kGoldenMaskSimilarity},
+         {"PenguinsSmall", RegionOfInterest::Format::kKeyPoint,
+          NormalizedKeypoint{0.329, 0.545}, kPenguinsSmall, kPenguinsSmallMask,
+          0.9f},
+         {"PenguinsLarge", RegionOfInterest::Format::kKeyPoint,
+          NormalizedKeypoint{0.329, 0.545}, kPenguinsLarge, kPenguinsLargeMask,
+          0.9f},
          // Scribble input.
          {"ScribbleToDog1", RegionOfInterest::Format::kScribble,
           std::vector<NormalizedKeypoint>{NormalizedKeypoint{0.44, 0.70},
                                           NormalizedKeypoint{0.44, 0.71},
                                           NormalizedKeypoint{0.44, 0.72}},
-          kCatsAndDogsMaskDog1, 0.84f},
+          kCatsAndDogsJpg, kCatsAndDogsMaskDog1, 0.84f},
          {"ScribbleToDog2", RegionOfInterest::Format::kScribble,
           std::vector<NormalizedKeypoint>{NormalizedKeypoint{0.66, 0.66},
                                           NormalizedKeypoint{0.66, 0.67},
                                           NormalizedKeypoint{0.66, 0.68}},
-          kCatsAndDogsMaskDog2, kGoldenMaskSimilarity}}),
+          kCatsAndDogsJpg, kCatsAndDogsMaskDog2, kGoldenMaskSimilarity}}),
     [](const ::testing::TestParamInfo<SucceedSegmentationWithRoi::ParamType>&
            info) { return info.param.test_name; });

diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD
index e2622a3c8..4fde58e02 100644
--- a/mediapipe/tasks/testdata/vision/BUILD
+++ b/mediapipe/tasks/testdata/vision/BUILD
@@ -69,6 +69,10 @@ mediapipe_files(srcs = [
     "multi_objects.jpg",
     "multi_objects_rotated.jpg",
     "palm_detection_full.tflite",
+    "penguins_large.jpg",
+    "penguins_large_mask.png",
+    "penguins_small.jpg",
+    "penguins_small_mask.png",
     "pointing_up.jpg",
     "pointing_up_rotated.jpg",
     "portrait.jpg",
@@ -135,6 +139,10 @@ filegroup(
         "mozart_square.jpg",
         "multi_objects.jpg",
         "multi_objects_rotated.jpg",
+        "penguins_large.jpg",
+        "penguins_large_mask.png",
+        "penguins_small.jpg",
+        "penguins_small_mask.png",
         "pointing_up.jpg",
         "pointing_up_rotated.jpg",
         "portrait.jpg",

diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl
index 722ec3426..4b51d9de0 100644
--- a/third_party/external_files.bzl
+++ b/third_party/external_files.bzl
@@ -66,8 +66,8 @@ def external_files():
 
     http_file(
         name = "com_google_mediapipe_BUILD",
-        sha256 = "d2b2a8346202691d7f831887c84e9642e974f64ed67851d9a58cf15c94b1f6b3",
-        urls = ["https://storage.googleapis.com/mediapipe-assets/BUILD?generation=1661875663693976167832357639365316787374795996401679955080207504"],
+        sha256 = "cfbc1404ba18ee9eb0f08e9ee66d5b51f3fac47f683a5fa0cc23b46f30e05a1f",
+        urls = ["https://storage.googleapis.com/mediapipe-assets/BUILD?generation=1686332366306166"],
     )
 
     http_file(
@@ -904,6 +904,30 @@ def external_files():
        urls =
["https://storage.googleapis.com/mediapipe-assets/palm_detection_lite.tflite?generation=1661875885885770"], ) + http_file( + name = "com_google_mediapipe_penguins_large_jpg", + sha256 = "3a7a74bf946b3e2b53a3953516a552df854b2854c91b3372d2d6343497ca2160", + urls = ["https://storage.googleapis.com/mediapipe-assets/penguins_large.jpg?generation=1686332378707665"], + ) + + http_file( + name = "com_google_mediapipe_penguins_large_mask_png", + sha256 = "8f78486266dabb1a3f28bf52750c0d005f96233fe505d5e8dcba02c6ee3a13cb", + urls = ["https://storage.googleapis.com/mediapipe-assets/penguins_large_mask.png?generation=1686332381154669"], + ) + + http_file( + name = "com_google_mediapipe_penguins_small_jpg", + sha256 = "708ca356d8be4fbf5b76d4f2fcd094e97122cc24934cfcca22ac3ab0f13c4632", + urls = ["https://storage.googleapis.com/mediapipe-assets/penguins_small.jpg?generation=1686332383656645"], + ) + + http_file( + name = "com_google_mediapipe_penguins_small_mask_png", + sha256 = "65523dd7ed468ee4be3cd0cfed5badcfa41eaa5cd06444c9ab9b71b2d5951abe", + urls = ["https://storage.googleapis.com/mediapipe-assets/penguins_small_mask.png?generation=1686332385707707"], + ) + http_file( name = "com_google_mediapipe_pointing_up_jpg", sha256 = "ecf8ca2611d08fa25948a4fc10710af9120e88243a54da6356bacea17ff3e36e", From eff56045e40198c569cd43b13c77b3711b2c5579 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 12 Jun 2023 19:56:20 +0530 Subject: [PATCH 042/106] Added hand landmarker protobuf utils --- .../test/vision/hand_landmarker/utils/BUILD | 21 +++++++ .../MPPHandLandmarkerResult+ProtobufHelpers.h | 27 +++++++++ ...MPPHandLandmarkerResult+ProtobufHelpers.mm | 59 +++++++++++++++++++ 3 files changed, 107 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD new file mode 100644 index 000000000..b5b215785 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD @@ -0,0 +1,21 @@ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +objc_library( + name = "MPPHandLandmarkerResultProtobufHelpers", + srcs = ["sources/MPPHandLandmarkerResult+ProtobufHelpers.mm"], + hdrs = ["sources/MPPHandLandmarkerResult+ProtobufHelpers.h"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + deps = [ + "//mediapipe/tasks/ios/test/vision/utils:parse_proto_utils", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/tasks/cc/components/containers/proto:landmarks_detection_result_cc_proto", + "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerResultHelpers", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h new file mode 100644 index 000000000..2e979e1fb --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h @@ -0,0 +1,27 @@ +// Copyright 2023 The MediaPipe Authors. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+@interface MPPHandLandmarkerResult (ProtobufHelpers)
+
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultFromTextEncodedProtobufFileWithName:(NSString *)fileName
+                                      shouldRemoveZPosition:(BOOL)removeZPosition;
+
+@end
+
+NS_ASSUME_NONNULL_END

diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm
new file mode 100644
index 000000000..36bf6f08c
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm
@@ -0,0 +1,59 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h"
+
+#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
+#import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h"
+
+#include "mediapipe/framework/formats/classification.pb.h"
+#include "mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.pb.h"
+#include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"
+
+namespace {
+using ClassificationListProto = ::mediapipe::ClassificationList;
+using ClassificationProto = ::mediapipe::Classification;
+using LandmarksDetectionResultProto =
+    ::mediapipe::tasks::containers::proto::LandmarksDetectionResult;
+using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
+}  // anonymous namespace
+
+@implementation MPPHandLandmarkerResult (ProtobufHelpers)
+
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultFromTextEncodedProtobufFileWithName:(NSString *)fileName
+                                      shouldRemoveZPosition:(BOOL)removeZPosition {
+  LandmarksDetectionResultProto landmarkDetectionResultProto;
+
+  if (!get_proto_from_pbtxt(fileName.cppString, landmarkDetectionResultProto).ok()) {
+    return nil;
+  }
+
+  if (removeZPosition) {
+    // Remove z position of landmarks, because they are not used in correctness testing. For video
+    // or live stream mode, the z positions vary a lot during tracking from frame to frame.
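+    // The loop below clears only the z coordinate; x and y are kept so the 2D
+    // positions can still be compared against the expected landmarks.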
+ for (int i = 0; i < landmarkDetectionResultProto.landmarks().landmark().size(); i++) { + auto &landmark = *landmarkDetectionResultProto.mutable_landmarks()->mutable_landmark(i); + landmark.clear_z(); + } + } + + return [MPPHandLandmarkerResult + handLandmarkerResultWithLandmarksProto:{landmarkDetectionResultProto.landmarks()} + worldLandmarksProto:{landmarkDetectionResultProto.world_landmarks()} + handednessProto:{landmarkDetectionResultProto.classifications()} + timestampInMilliSeconds:0]; +} + +@end From baa79046b93757ee044f8daecc0fe846d7a461ff Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 12 Jun 2023 19:56:34 +0530 Subject: [PATCH 043/106] Added iOS Objective C hand landmarker tests --- .../ios/test/vision/hand_landmarker/BUILD | 62 ++++ .../hand_landmarker/MPPHandLandmarkerTests.m | 292 ++++++++++++++++++ 2 files changed, 354 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD b/mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD new file mode 100644 index 000000000..eaa2f0642 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD @@ -0,0 +1,62 @@ +load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test") +load( + "//mediapipe/framework/tool:ios.bzl", + "MPP_TASK_MINIMUM_OS_VERSION", +) +load( + "@org_tensorflow//tensorflow/lite:special_rules.bzl", + "tflite_ios_lab_runner", +) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +# Default tags for filtering iOS targets. Targets are restricted to Apple platforms. +TFL_DEFAULT_TAGS = [ + "apple", +] + +# Following sanitizer tests are not supported by iOS test targets. +TFL_DISABLED_SANITIZER_TAGS = [ + "noasan", + "nomsan", + "notsan", +] + +objc_library( + name = "MPPHandLandmarkerObjcTestLibrary", + testonly = 1, + srcs = ["MPPHandLandmarkerTests.m"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + data = [ + "//mediapipe/tasks/testdata/vision:test_models", + "//mediapipe/tasks/testdata/vision:test_images", + "//mediapipe/tasks/testdata/vision:test_protos", + ], + deps = [ + "//mediapipe/tasks/ios/common:MPPCommon", + "//mediapipe/tasks/ios/test/vision/hand_landmarker/utils:MPPHandLandmarkerResultProtobufHelpers", + "//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils", + "//mediapipe/tasks/ios/vision/hand_landmarker:MPPHandLandmarker", + ] + select({ + "//third_party:opencv_ios_sim_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_x86_64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//conditions:default": ["@ios_opencv//:OpencvFramework"], + }), +) + +ios_unit_test( + name = "MPPHandLandmarkerObjcTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPHandLandmarkerObjcTestLibrary", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m new file mode 100644 index 000000000..f9bdeb150 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m @@ -0,0 +1,292 @@ +// Copyright 2023 The MediaPipe Authors. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <XCTest/XCTest.h>
+
+#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
+#import "mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h"
+#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h"
+
+static NSString *const kPbFileExtension = @"pbtxt";
+
+typedef NSDictionary<NSString *, NSString *> ResourceFileInfo;
+
+static ResourceFileInfo *const kHandLandmarkerBundleAssetFile =
+    @{@"name" : @"hand_landmarker", @"type" : @"task"};
+
+static ResourceFileInfo *const kTwoHandsImage = @{@"name" : @"right_hands", @"type" : @"jpg"};
+static ResourceFileInfo *const kNoHandsImage = @{@"name" : @"cats_and_dogs", @"type" : @"jpg"};
+static ResourceFileInfo *const kThumbUpImage = @{@"name" : @"thumb_up", @"type" : @"jpg"};
+static ResourceFileInfo *const kPointingUpRotatedImage =
+    @{@"name" : @"pointing_up_rotated", @"type" : @"jpg"};
+
+static ResourceFileInfo *const kExpectedThumbUpLandmarksFile =
+    @{@"name" : @"thumb_up_landmarks", @"type" : kPbFileExtension};
+static ResourceFileInfo *const kExpectedPointingUpRotatedLandmarksFile =
+    @{@"name" : @"pointing_up_rotated_landmarks", @"type" : kPbFileExtension};
+
+static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
+static const float kLandmarksErrorTolerance = 0.03f;
+
+#define AssertEqualErrors(error, expectedError)              \
+  XCTAssertNotNil(error);                                    \
+  XCTAssertEqualObjects(error.domain, expectedError.domain); \
+  XCTAssertEqual(error.code, expectedError.code);            \
+  XCTAssertEqualObjects(error.localizedDescription, expectedError.localizedDescription)
+
+#define AssertApproximatelyEqualLandmarks(landmark, expectedLandmark, handIndex, landmarkIndex)   \
+  XCTAssertEqualWithAccuracy(landmark.x, expectedLandmark.x, kLandmarksErrorTolerance,            \
+                             @"hand index = %d landmark index j = %d", handIndex, landmarkIndex); \
+  XCTAssertEqualWithAccuracy(landmark.y, expectedLandmark.y, kLandmarksErrorTolerance,            \
+                             @"hand index = %d landmark index j = %d", handIndex, landmarkIndex);
+
+#define AssertHandLandmarkerResultIsEmpty(handLandmarkerResult)  \
+  XCTAssertTrue(handLandmarkerResult.handedness.count == 0);     \
+  XCTAssertTrue(handLandmarkerResult.landmarks.count == 0);      \
+  XCTAssertTrue(handLandmarkerResult.worldLandmarks.count == 0);
+
+@interface MPPHandLandmarkerTests : XCTestCase
+@end
+
+@implementation MPPHandLandmarkerTests
+
+#pragma mark Results
+
++ (MPPHandLandmarkerResult *)emptyHandLandmarkerResult {
+  return [[MPPHandLandmarkerResult alloc] initWithLandmarks:@[]
+                                              worldLandmarks:@[]
+                                                  handedness:@[]
+                                     timestampInMilliseconds:0];
+}
+
++ (MPPHandLandmarkerResult *)thumbUpHandLandmarkerResult {
+  NSString *filePath =
+      [MPPHandLandmarkerTests filePathWithFileInfo:kExpectedThumbUpLandmarksFile];
+
+  return [MPPHandLandmarkerResult
+      handLandmarkerResultFromTextEncodedProtobufFileWithName:filePath
+                                        shouldRemoveZPosition:YES];
+}
+
++ (MPPHandLandmarkerResult *)pointingUpRotatedHandLandmarkerResult {
+  NSString *filePath =
+      [MPPHandLandmarkerTests filePathWithFileInfo:kExpectedPointingUpRotatedLandmarksFile];
+
+  return [MPPHandLandmarkerResult
+      handLandmarkerResultFromTextEncodedProtobufFileWithName:filePath
+                                        shouldRemoveZPosition:YES];
+}
+
+- (void)assertMultiHandLandmarks:
+            (NSArray<NSArray<MPPNormalizedLandmark *> *> *)multiHandLandmarks
+    areApproximatelyEqualToExpectedMultiHandLandmarks:
+        (NSArray<NSArray<MPPNormalizedLandmark *> *> *)expectedMultiHandLandmarks {
+  XCTAssertEqual(multiHandLandmarks.count, expectedMultiHandLandmarks.count);
+  if (multiHandLandmarks.count == 0) {
+    return;
+  }
+
+  NSArray<MPPNormalizedLandmark *> *topHandLandmarks = multiHandLandmarks[0];
+  NSArray<MPPNormalizedLandmark *> *expectedTopHandLandmarks = expectedMultiHandLandmarks[0];
+
+  XCTAssertEqual(topHandLandmarks.count, expectedTopHandLandmarks.count);
+  for (int i = 0; i < expectedTopHandLandmarks.count; i++) {
+    MPPNormalizedLandmark *landmark = topHandLandmarks[i];
+    XCTAssertNotNil(landmark);
+    AssertApproximatelyEqualLandmarks(landmark, expectedTopHandLandmarks[i], 0, i);
+  }
+}
+
+- (void)assertMultiHandWorldLandmarks:
+            (NSArray<NSArray<MPPLandmark *> *> *)multiHandWorldLandmarks
+    areApproximatelyEqualToExpectedMultiHandWorldLandmarks:
+        (NSArray<NSArray<MPPLandmark *> *> *)expectedMultiHandWorldLandmarks {
+  XCTAssertEqual(multiHandWorldLandmarks.count, expectedMultiHandWorldLandmarks.count);
+  if (expectedMultiHandWorldLandmarks.count == 0) {
+    return;
+  }
+
+  NSArray<MPPLandmark *> *topHandWorldLandmarks = multiHandWorldLandmarks[0];
+  NSArray<MPPLandmark *> *expectedTopHandWorldLandmarks = expectedMultiHandWorldLandmarks[0];
+
+  XCTAssertEqual(topHandWorldLandmarks.count, expectedTopHandWorldLandmarks.count);
+  for (int i = 0; i < expectedTopHandWorldLandmarks.count; i++) {
+    MPPLandmark *landmark = topHandWorldLandmarks[i];
+    XCTAssertNotNil(landmark);
+    AssertApproximatelyEqualLandmarks(landmark, expectedTopHandWorldLandmarks[i], 0, i);
+  }
+}
+
+- (void)assertHandLandmarkerResult:(MPPHandLandmarkerResult *)handLandmarkerResult
+    isApproximatelyEqualToExpectedResult:
+        (MPPHandLandmarkerResult *)expectedHandLandmarkerResult {
+  [self assertMultiHandLandmarks:handLandmarkerResult.landmarks
+      areApproximatelyEqualToExpectedMultiHandLandmarks:expectedHandLandmarkerResult.landmarks];
+  [self assertMultiHandWorldLandmarks:handLandmarkerResult.worldLandmarks
+      areApproximatelyEqualToExpectedMultiHandWorldLandmarks:expectedHandLandmarkerResult
+                                                                 .worldLandmarks];
+}
+
+#pragma mark File
+
++ (NSString *)filePathWithFileInfo:(ResourceFileInfo *)fileInfo {
+  NSString *filePath = [MPPHandLandmarkerTests filePathWithName:fileInfo[@"name"]
+                                                      extension:fileInfo[@"type"]];
+  return filePath;
+}
+
++ (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension {
+  NSString *filePath = [[NSBundle bundleForClass:self.class] pathForResource:fileName
+                                                                      ofType:extension];
+  return filePath;
+}
+
+#pragma mark Hand Landmarker Initializers
+
+- (MPPHandLandmarkerOptions *)handLandmarkerOptionsWithModelFileInfo:
+    (ResourceFileInfo *)modelFileInfo {
+  NSString *modelPath = [MPPHandLandmarkerTests filePathWithFileInfo:modelFileInfo];
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [[MPPHandLandmarkerOptions alloc] init];
+  handLandmarkerOptions.baseOptions.modelAssetPath = modelPath;
+
+  return handLandmarkerOptions;
+}
+
+- (MPPHandLandmarker *)createHandLandmarkerWithOptionsSucceeds:
+    (MPPHandLandmarkerOptions *)handLandmarkerOptions {
+  MPPHandLandmarker *handLandmarker =
+      [[MPPHandLandmarker alloc] initWithOptions:handLandmarkerOptions error:nil];
+  XCTAssertNotNil(handLandmarker);
+
+  return handLandmarker;
+}
+
+- (void)assertCreateHandLandmarkerWithOptions:
+            (MPPHandLandmarkerOptions *)handLandmarkerOptions
+                       failsWithExpectedError:(NSError *)expectedError {
+  NSError *error = nil;
+  MPPHandLandmarker *handLandmarker =
+      [[MPPHandLandmarker alloc] initWithOptions:handLandmarkerOptions error:&error];
+
+  XCTAssertNil(handLandmarker);
+  AssertEqualErrors(error, expectedError);
+}
+
+#pragma mark Assert Hand Landmarker Results
+
+- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo {
+  MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPHandLandmarkerTests class]
+                                              fileName:fileInfo[@"name"]
+                                                ofType:fileInfo[@"type"]];
+  XCTAssertNotNil(image);
+
+  return image;
+}
+
+- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo
+                    orientation:(UIImageOrientation)orientation {
+  MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPHandLandmarkerTests class]
+                                              fileName:fileInfo[@"name"]
+                                                ofType:fileInfo[@"type"]
+                                           orientation:orientation];
+  XCTAssertNotNil(image);
+
+  return image;
+}
+
+- (MPPHandLandmarkerResult *)detectInImageWithFileInfo:(ResourceFileInfo *)imageFileInfo
+                                   usingHandLandmarker:
+                                       (MPPHandLandmarker *)handLandmarker {
+  MPPImage *mppImage = [self imageWithFileInfo:imageFileInfo];
+  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInImage:mppImage
+                                                                          error:nil];
+  XCTAssertNotNil(handLandmarkerResult);
+
+  return handLandmarkerResult;
+}
+
+- (void)assertResultsOfDetectInImageWithFileInfo:(ResourceFileInfo *)fileInfo
+                             usingHandLandmarker:(MPPHandLandmarker *)handLandmarker
+         approximatelyEqualsHandLandmarkerResult:
+             (MPPHandLandmarkerResult *)expectedHandLandmarkerResult {
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [self detectInImageWithFileInfo:fileInfo usingHandLandmarker:handLandmarker];
+  [self assertHandLandmarkerResult:handLandmarkerResult
+      isApproximatelyEqualToExpectedResult:expectedHandLandmarkerResult];
+}
+
+#pragma mark General Tests
+
+- (void)testDetectWithModelPathSucceeds {
+  NSString *modelPath =
+      [MPPHandLandmarkerTests filePathWithFileInfo:kHandLandmarkerBundleAssetFile];
+  MPPHandLandmarker *handLandmarker =
+      [[MPPHandLandmarker alloc] initWithModelPath:modelPath error:nil];
+  XCTAssertNotNil(handLandmarker);
+
+  [self assertResultsOfDetectInImageWithFileInfo:kThumbUpImage
+                             usingHandLandmarker:handLandmarker
+         approximatelyEqualsHandLandmarkerResult:[MPPHandLandmarkerTests
+                                                     thumbUpHandLandmarkerResult]];
+}
+
+- (void)testDetectWithEmptyResultsSucceeds {
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
+
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [self detectInImageWithFileInfo:kNoHandsImage usingHandLandmarker:handLandmarker];
+  AssertHandLandmarkerResultIsEmpty(handLandmarkerResult);
+}
+
+- (void)testDetectWithNumHandsSucceeds {
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+
+  const NSInteger numHands = 2;
+  handLandmarkerOptions.numHands = numHands;
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
+
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [self detectInImageWithFileInfo:kTwoHandsImage usingHandLandmarker:handLandmarker];
+
+  XCTAssertTrue(handLandmarkerResult.handedness.count == numHands);
+}
+
+- (void)testDetectWithRotationSucceeds {
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
+
+  MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage
+                                   orientation:UIImageOrientationRight];
+
+  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInImage:mppImage
+                                                                          error:nil];
+
+  [self assertHandLandmarkerResult:handLandmarkerResult
+      isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests
+                                               pointingUpRotatedHandLandmarkerResult]];
+}
+
+@end

From 8a2ec518deb57699163bd5eb7a5d2f4d54f71135 Mon Sep 17 00:00:00 2001
From: Sebastian Schmidt
Date: Mon, 12 Jun 2023 08:42:40 -0700
Subject: [PATCH 044/106] Use .mjs for ESM Modules and use .cjs for CommonJS

PiperOrigin-RevId: 539664711
---
 mediapipe/tasks/web/audio/BUILD  | 28 +++++++++++++++++++++++++---
 mediapipe/tasks/web/package.json |  7 ++++---
 mediapipe/tasks/web/text/BUILD   | 28 +++++++++++++++++++++++++---
 mediapipe/tasks/web/vision/BUILD | 28 +++++++++++++++++++++++++---
 4 files changed, 79 insertions(+), 12 deletions(-)

diff --git a/mediapipe/tasks/web/audio/BUILD b/mediapipe/tasks/web/audio/BUILD
index 4dd5a2f6b..3338d17be 100644
--- a/mediapipe/tasks/web/audio/BUILD
+++ b/mediapipe/tasks/web/audio/BUILD
@@ -38,7 +38,7 @@ mediapipe_files(srcs = [
 ])
 
 rollup_bundle(
-    name = "audio_bundle",
+    name = "audio_bundle_mjs",
     config_file = "//mediapipe/tasks/web:rollup.config.mjs",
     entry_point = "index.ts",
     format = "esm",
@@ -69,6 +69,29 @@ rollup_bundle(
     ],
 )
 
+genrule(
+    name = "audio_sources",
+    srcs = [
+        ":audio_bundle_cjs",
+        ":audio_bundle_mjs",
+    ],
+    outs = [
+        "audio_bundle.cjs",
+        "audio_bundle.cjs.map",
+        "audio_bundle.mjs",
+        "audio_bundle.mjs.map",
+    ],
+    cmd = (
+        "for FILE in $(SRCS); do " +
+        "  OUT_FILE=$(GENDIR)/mediapipe/tasks/web/audio/$$(" +
+        "      basename $$FILE | sed -E 's/_([cm])js\\.js/.\\1js/'" +
+        "  ); " +
+        "  echo $$FILE ; echo $$OUT_FILE ; " +
+        "  cp $$FILE $$OUT_FILE ; " +
+        "done;"
+    ),
+)
+
 genrule(
     name = "package_json",
     srcs = ["//mediapipe/tasks/web:package.json"],
@@ -91,8 +114,7 @@ pkg_npm(
         "wasm/audio_wasm_internal.wasm",
         "wasm/audio_wasm_nosimd_internal.js",
         "wasm/audio_wasm_nosimd_internal.wasm",
-        ":audio_bundle",
-        ":audio_bundle_cjs",
+        ":audio_sources",
        ":package_json",
     ],
 )
diff --git a/mediapipe/tasks/web/package.json b/mediapipe/tasks/web/package.json
index 3f495d151..4a42018f0 100644
--- a/mediapipe/tasks/web/package.json
+++ b/mediapipe/tasks/web/package.json
@@ -2,11 +2,12 @@
   "name": "@mediapipe/tasks-__NAME__",
   "version": "__VERSION__",
   "description": "__DESCRIPTION__",
-  "main": "__NAME___bundle_cjs.js",
-  "browser": "__NAME___bundle.js",
-  "module": "__NAME___bundle.js",
+  "main": "__NAME___bundle.cjs",
+  "browser": "__NAME___bundle.mjs",
+  "module": "__NAME___bundle.mjs",
   "author": "mediapipe@google.com",
   "license": "Apache-2.0",
+  "type": "module",
   "types": "__TYPES__",
   "homepage": "http://mediapipe.dev",
   "keywords": [ "AR", "ML", "Augmented", "MediaPipe", "MediaPipe Tasks" ]
diff --git a/mediapipe/tasks/web/text/BUILD b/mediapipe/tasks/web/text/BUILD
index f68a8c9f5..76d875ba5 100644
--- a/mediapipe/tasks/web/text/BUILD
+++ b/mediapipe/tasks/web/text/BUILD
@@ -39,7 +39,7 @@ mediapipe_ts_library(
 )
 
 rollup_bundle(
-    name = "text_bundle",
+    name = "text_bundle_mjs",
     config_file = "//mediapipe/tasks/web:rollup.config.mjs",
     entry_point = "index.ts",
     format = "esm",
@@ -70,6 +70,29 @@ rollup_bundle(
     ],
 )
 
+genrule(
+    name = "text_sources",
+    srcs = [
+        ":text_bundle_cjs",
+        ":text_bundle_mjs",
+    ],
+    outs = [
+        "text_bundle.cjs",
+        "text_bundle.cjs.map",
+        "text_bundle.mjs",
+        "text_bundle.mjs.map",
+    ],
+    cmd = (
+        "for FILE in $(SRCS); do " +
+        "  OUT_FILE=$(GENDIR)/mediapipe/tasks/web/text/$$(" +
+        "      basename $$FILE | sed -E 's/_([cm])js\\.js/.\\1js/'" +
+        "  ); " +
+        "  echo $$FILE ; echo $$OUT_FILE ; " +
+        "  cp $$FILE $$OUT_FILE ; " +
+        "done;"
+    ),
+)
+
 genrule(
     name = "package_json",
     srcs = ["//mediapipe/tasks/web:package.json"],
@@ -93,7 +116,6 @@ pkg_npm(
         "wasm/text_wasm_nosimd_internal.js",
         "wasm/text_wasm_nosimd_internal.wasm",
         ":package_json",
-        ":text_bundle",
-        ":text_bundle_cjs",
+        ":text_sources",
     ],
 )
diff --git a/mediapipe/tasks/web/vision/BUILD b/mediapipe/tasks/web/vision/BUILD
index a7767fe53..58795b166 100644
--- a/mediapipe/tasks/web/vision/BUILD
+++ b/mediapipe/tasks/web/vision/BUILD
@@ -50,7 +50,7 @@ mediapipe_ts_library(
 )
 
 rollup_bundle(
-    name = "vision_bundle",
+    name = "vision_bundle_mjs",
     config_file = "//mediapipe/tasks/web:rollup.config.mjs",
     entry_point = "index.ts",
     format = "esm",
@@ -81,6 +81,29 @@ rollup_bundle(
     ],
 )
 
+genrule(
+    name = "vision_sources",
+    srcs = [
+        ":vision_bundle_cjs",
+        ":vision_bundle_mjs",
+    ],
+    outs = [
+        "vision_bundle.cjs",
+        "vision_bundle.cjs.map",
+        "vision_bundle.mjs",
+        "vision_bundle.mjs.map",
+    ],
+    cmd = (
+        "for FILE in $(SRCS); do " +
+        "  OUT_FILE=$(GENDIR)/mediapipe/tasks/web/vision/$$(" +
+        "      basename $$FILE | sed -E 's/_([cm])js\\.js/.\\1js/'" +
+        "  ); " +
+        "  echo $$FILE ; echo $$OUT_FILE ; " +
+        "  cp $$FILE $$OUT_FILE ; " +
+        "done;"
+    ),
+)
+
 genrule(
     name = "package_json",
     srcs = ["//mediapipe/tasks/web:package.json"],
@@ -104,7 +127,6 @@ pkg_npm(
         "wasm/vision_wasm_nosimd_internal.js",
         "wasm/vision_wasm_nosimd_internal.wasm",
         ":package_json",
-        ":vision_bundle",
-        ":vision_bundle_cjs",
+        ":vision_sources",
     ],
 )

From fe0d1b1e8336195e81f61edf8b14a7e5a36fc192 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 12 Jun 2023 09:25:40 -0700
Subject: [PATCH 045/106] Internal change

PiperOrigin-RevId: 539675912
---
 .../python/metadata/flatbuffers_lib/flatbuffers_lib.cc | 4 ++--
 third_party/flatbuffers/workspace.bzl                  | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc b/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc
index 0c251c69e..cf6ddd9b2 100644
--- a/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc
+++ b/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc
@@ -41,12 +41,12 @@ PYBIND11_MODULE(_pywrap_flatbuffers, m) {
             self->PushFlatBuffer(reinterpret_cast<const uint8_t*>(contents.c_str()),
                                  contents.length());
           });
-  m.def("generate_text_file", &flatbuffers::GenerateTextFile);
+  m.def("generate_text_file", &flatbuffers::GenTextFile);
   m.def("generate_text",
         [](const flatbuffers::Parser& parser,
           const std::string& buffer) -> std::string {
          std::string text;
-          const char* result = flatbuffers::GenerateText(
+          const char* result = flatbuffers::GenText(
              parser, reinterpret_cast<const uint8_t*>(buffer.c_str()), &text);
          if (result) {
            return "";
diff --git a/third_party/flatbuffers/workspace.bzl b/third_party/flatbuffers/workspace.bzl
index 0edb7a6f6..d06e2cbe9 100644
--- a/third_party/flatbuffers/workspace.bzl
+++ b/third_party/flatbuffers/workspace.bzl
@@ -5,11 +5,11 @@ load("//third_party:repo.bzl", "third_party_http_archive")
 def repo():
     third_party_http_archive(
         name = "flatbuffers",
strip_prefix = "flatbuffers-23.5.8", - sha256 = "55b75dfa5b6f6173e4abf9c35284a10482ba65db886b39db511eba6c244f1e88", + strip_prefix = "flatbuffers-23.5.26", + sha256 = "1cce06b17cddd896b6d73cc047e36a254fb8df4d7ea18a46acf16c4c0cd3f3f3", urls = [ - "https://github.com/google/flatbuffers/archive/v23.5.8.tar.gz", - "https://github.com/google/flatbuffers/archive/v23.5.8.tar.gz", + "https://github.com/google/flatbuffers/archive/v23.5.26.tar.gz", + "https://github.com/google/flatbuffers/archive/v23.5.26.tar.gz", ], build_file = "//third_party/flatbuffers:BUILD.bazel", delete = ["build_defs.bzl", "BUILD.bazel"], From 96cc0fd07bb4f51eda84ce2f34cf713460a9d036 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 12 Jun 2023 11:51:34 -0700 Subject: [PATCH 046/106] Internal change PiperOrigin-RevId: 539719443 --- mediapipe/framework/BUILD | 16 + .../calculator_graph_summary_packet_test.cc | 327 ++++++++++++++++++ mediapipe/framework/timestamp.cc | 7 + mediapipe/framework/timestamp.h | 4 + mediapipe/framework/timestamp_test.cc | 16 + 5 files changed, 370 insertions(+) create mode 100644 mediapipe/framework/calculator_graph_summary_packet_test.cc diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index a7d9e0a63..86608285b 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -1355,6 +1355,22 @@ cc_test( ], ) +cc_test( + name = "calculator_graph_summary_packet_test", + srcs = ["calculator_graph_summary_packet_test.cc"], + deps = [ + ":calculator_framework", + ":packet", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "//mediapipe/framework/stream_handler:immediate_input_stream_handler", + "//mediapipe/framework/tool:sink", + ], +) + cc_test( name = "calculator_runner_test", size = "medium", diff --git a/mediapipe/framework/calculator_graph_summary_packet_test.cc b/mediapipe/framework/calculator_graph_summary_packet_test.cc new file mode 100644 index 000000000..c8d1e7eb7 --- /dev/null +++ b/mediapipe/framework/calculator_graph_summary_packet_test.cc @@ -0,0 +1,327 @@ +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/packet.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" + +namespace mediapipe { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Node; +using ::mediapipe::api2::Output; +using ::testing::ElementsAre; +using ::testing::Eq; +using ::testing::IsEmpty; +using ::testing::Value; + +namespace { + +MATCHER_P2(IntPacket, value, timestamp, "") { + *result_listener << "where object is (value: " << arg.template Get() + << ", timestamp: " << arg.Timestamp() << ")"; + return Value(arg.template Get(), Eq(value)) && + Value(arg.Timestamp(), Eq(timestamp)); +} + +// Calculates and produces sum of all passed inputs when no more packets can be +// expected on the input stream. 
+class SummaryPacketCalculator : public Node {
+ public:
+  static constexpr Input<int> kIn{"IN"};
+  static constexpr Output<int> kOut{"SUMMARY"};
+
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+
+  static absl::Status UpdateContract(CalculatorContract* cc) {
+    // Makes sure there are no automatic timestamp bound updates when Process
+    // is called.
+    cc->SetTimestampOffset(TimestampDiff::Unset());
+    // Currently, only ImmediateInputStreamHandler supports "done" timestamp
+    // bound update. (ImmediateInputStreamHandler handles multiple input
+    // streams differently, so, in that case, calculator adjustments may be
+    // required.)
+    // TODO: update all input stream handlers to support "done"
+    // timestamp bound update.
+    cc->SetInputStreamHandler("ImmediateInputStreamHandler");
+    // Enables processing timestamp bound updates. For this use case we are
+    // specifically interested in the "done" timestamp bound update. (E.g. when
+    // all input packet sources are closed.)
+    cc->SetProcessTimestampBounds(true);
+    return absl::OkStatus();
+  }
+
+  absl::Status Process(CalculatorContext* cc) final {
+    if (!kIn(cc).IsEmpty()) {
+      value_ += kIn(cc).Get();
+    }
+
+    if (kOut(cc).IsClosed()) {
+      // This can happen:
+      // 1. If, during the previous invocation, kIn(cc).IsDone() == true (e.g.
+      //    a source calculator finished generating packets sent to kIn) and
+      //    HasNextAllowedInStream() == true (which is often the case).
+      // 2. For Timestamp::PreStream, ImmediateInputStreamHandler will still
+      //    invoke Process() with Timestamp::Max to indicate the "done"
+      //    timestamp bound update.
+      return absl::OkStatus();
+    }
+
+    // TODO: an input stream holding a packet with a timestamp that has
+    // no next timestamp allowed in stream should always result in
+    // InputStream::IsDone() == true.
+    if (kIn(cc).IsDone() || !cc->InputTimestamp().HasNextAllowedInStream()) {
+      // kOut(cc).Send(value_) can be used here as well, however in the case of
+      // a source calculator sending inputs into kIn the resulting timestamp is
+      // not well defined (e.g. it can be the last packet timestamp or
+      // Timestamp::Max())
+      // TODO: the last packet from a source should always result in
+      // InputStream::IsDone() == true.
+      kOut(cc).Send(value_, Timestamp::Max());
+      kOut(cc).Close();
+    }
+    return absl::OkStatus();
+  }
+
+ private:
+  int value_ = 0;
+};
+MEDIAPIPE_REGISTER_NODE(SummaryPacketCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnClosingAllPacketSources) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: 'input'
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: 'IN:input'
+      output_stream: 'SUMMARY:output'
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp(10));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  send_packet(20, Timestamp(11));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+}
+
+TEST(SummaryPacketCalculatorUseCaseTest, ProducesSummaryPacketOnMaxTimestamp) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: 'input'
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: 'IN:input'
+      output_stream: 'SUMMARY:output'
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp(10));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  send_packet(20, Timestamp::Max());
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnPreStreamTimestamp) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: 'input'
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: 'IN:input'
+      output_stream: 'SUMMARY:output'
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp::PreStream());
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(10, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnPostStreamTimestamp) {
+  std::vector<Packet> output_packets;
+  CalculatorGraphConfig graph_config =
+      ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        input_stream: 'input'
+        node {
+          calculator: "SummaryPacketCalculator"
+          input_stream: 'IN:input'
+          output_stream: 'SUMMARY:output'
+        }
+      )pb");
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp::PostStream());
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(10, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+class IntGeneratorCalculator : public Node {
+ public:
+  static constexpr Output<int> kOut{"INT"};
+
+  MEDIAPIPE_NODE_CONTRACT(kOut);
+
+  absl::Status Process(CalculatorContext* cc) final {
+    kOut(cc).Send(20, Timestamp(0));
+    kOut(cc).Send(10, Timestamp(1000));
+    return tool::StatusStop();
+  }
+};
+MEDIAPIPE_REGISTER_NODE(IntGeneratorCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnSourceCalculatorCompletion) {
+  std::vector<Packet> output_packets;
+  CalculatorGraphConfig graph_config =
+      ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "IntGeneratorCalculator"
+          output_stream: "INT:int_value"
+        }
+        node {
+          calculator: "SummaryPacketCalculator"
+          input_stream: "IN:int_value"
+          output_stream: "SUMMARY:output"
+        }
+      )pb");
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_EXPECT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+}
+
+class EmitOnCloseCalculator : public Node {
+ public:
+  static constexpr Input<int> kIn{"IN"};
+  static constexpr Output<int> kOut{"INT"};
+
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+
+  absl::Status Process(CalculatorContext* cc) final { return absl::OkStatus(); }
+
+  absl::Status Close(CalculatorContext* cc) final {
+    kOut(cc).Send(20, Timestamp(0));
+    kOut(cc).Send(10, Timestamp(1000));
+    return absl::OkStatus();
+  }
+};
+MEDIAPIPE_REGISTER_NODE(EmitOnCloseCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnAnotherCalculatorClosure) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: "input"
+    node {
+      calculator: "EmitOnCloseCalculator"
+      input_stream: "IN:input"
+      output_stream: "INT:int_value"
+    }
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: "IN:int_value"
+      output_stream: "SUMMARY:output"
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  MP_ASSERT_OK(graph.CloseInputStream("input"));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+}  // namespace
+}  // namespace mediapipe
diff --git a/mediapipe/framework/timestamp.cc b/mediapipe/framework/timestamp.cc
index 05b69747f..4ece74c99 100644
--- a/mediapipe/framework/timestamp.cc
+++ b/mediapipe/framework/timestamp.cc
@@ -131,6 +131,13 @@ Timestamp Timestamp::NextAllowedInStream() const {
   return *this + 1;
 }
 
+bool Timestamp::HasNextAllowedInStream() const {
+  if (*this >= Max() || *this == PreStream()) {
+    return false;
+  }
+  return true;
+}
+
 Timestamp Timestamp::PreviousAllowedInStream() const {
   if (*this <= Min() || *this == PostStream()) {
     // Indicates that no previous timestamps may occur.
diff --git a/mediapipe/framework/timestamp.h b/mediapipe/framework/timestamp.h
index 966ec1839..d125d28bb 100644
--- a/mediapipe/framework/timestamp.h
+++ b/mediapipe/framework/timestamp.h
@@ -186,6 +186,10 @@ class Timestamp {
   // CHECKs that this->IsAllowedInStream().
   Timestamp NextAllowedInStream() const;
 
+  // Returns true if there's a next timestamp in the range [Min .. Max] after
+  // this one.
+  bool HasNextAllowedInStream() const;
+
   // Returns the previous timestamp in the range [Min .. Max], or
   // Unstarted() if no Packets may precede one with this timestamp.
   Timestamp PreviousAllowedInStream() const;
diff --git a/mediapipe/framework/timestamp_test.cc b/mediapipe/framework/timestamp_test.cc
index 5f5cc3428..3ba0b5c36 100644
--- a/mediapipe/framework/timestamp_test.cc
+++ b/mediapipe/framework/timestamp_test.cc
@@ -125,6 +125,22 @@ TEST(TimestampTest, NextAllowedInStream) {
             Timestamp::PostStream().NextAllowedInStream());
 }
 
+TEST(TimestampTest, HasNextAllowedInStream) {
+  EXPECT_TRUE(Timestamp::Min().HasNextAllowedInStream());
+  EXPECT_TRUE((Timestamp::Min() + 1).HasNextAllowedInStream());
+  EXPECT_TRUE(Timestamp(-1000).HasNextAllowedInStream());
+  EXPECT_TRUE(Timestamp(0).HasNextAllowedInStream());
+  EXPECT_TRUE(Timestamp(1000).HasNextAllowedInStream());
+  EXPECT_TRUE((Timestamp::Max() - 2).HasNextAllowedInStream());
+  EXPECT_TRUE((Timestamp::Max() - 1).HasNextAllowedInStream());
+
+  EXPECT_FALSE(Timestamp::PreStream().HasNextAllowedInStream());
+  EXPECT_FALSE(Timestamp::Max().HasNextAllowedInStream());
+  EXPECT_FALSE(Timestamp::PostStream().HasNextAllowedInStream());
+  EXPECT_FALSE(Timestamp::OneOverPostStream().HasNextAllowedInStream());
+  EXPECT_FALSE(Timestamp::Done().HasNextAllowedInStream());
+}
+
 TEST(TimestampTest, SpecialValueDifferences) {
   {  // Lower range
     const std::vector<Timestamp> timestamps = {

From b19b80e10f021978b01706d9d7530b6e19fd860e Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 13 Jun 2023 01:50:58 -0700
Subject: [PATCH 047/106] Add support for int64 constant side packet value.
PiperOrigin-RevId: 539893314
---
 .../calculators/core/constant_side_packet_calculator.cc      | 4 ++++
 .../calculators/core/constant_side_packet_calculator.proto   | 5 +++--
 .../calculators/core/constant_side_packet_calculator_test.cc | 2 ++
 3 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.cc b/mediapipe/calculators/core/constant_side_packet_calculator.cc
index 509f7e9dd..0bcf22ec9 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator.cc
+++ b/mediapipe/calculators/core/constant_side_packet_calculator.cc
@@ -87,6 +87,8 @@ class ConstantSidePacketCalculator : public CalculatorBase {
         packet.Set<double>();
       } else if (packet_options.has_time_series_header_value()) {
         packet.Set<TimeSeriesHeader>();
+      } else if (packet_options.has_int64_value()) {
+        packet.Set<int64_t>();
       } else {
         return absl::InvalidArgumentError(
             "None of supported values were specified in options.");
@@ -124,6 +126,8 @@ class ConstantSidePacketCalculator : public CalculatorBase {
       } else if (packet_options.has_time_series_header_value()) {
        packet.Set(MakePacket<TimeSeriesHeader>(
            packet_options.time_series_header_value()));
+      } else if (packet_options.has_int64_value()) {
+        packet.Set(MakePacket<int64_t>(packet_options.int64_value()));
       } else {
         return absl::InvalidArgumentError(
             "None of supported values were specified in options.");
diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.proto b/mediapipe/calculators/core/constant_side_packet_calculator.proto
index 78a773a6c..bce827055 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator.proto
+++ b/mediapipe/calculators/core/constant_side_packet_calculator.proto
@@ -29,13 +29,14 @@ message ConstantSidePacketCalculatorOptions {
   message ConstantSidePacket {
     oneof value {
       int32 int_value = 1;
+      uint64 uint64_value = 5;
+      int64 int64_value = 11;
       float float_value = 2;
+      double double_value = 9;
       bool bool_value = 3;
       string string_value = 4;
-      uint64 uint64_value = 5;
       ClassificationList classification_list_value = 6;
       LandmarkList landmark_list_value = 7;
-      double double_value = 9;
       TimeSeriesHeader time_series_header_value = 10;
     }
   }
diff --git a/mediapipe/calculators/core/constant_side_packet_calculator_test.cc b/mediapipe/calculators/core/constant_side_packet_calculator_test.cc
index a7ff808f4..6e8c0ec33 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator_test.cc
+++ b/mediapipe/calculators/core/constant_side_packet_calculator_test.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include <cstdint>
 #include <string>
 
 #include "absl/strings/string_view.h"
@@ -58,6 +59,7 @@ TEST(ConstantSidePacketCalculatorTest, EveryPossibleType) {
   DoTestSingleSidePacket<float>("{ float_value: 6.5f }", 6.5f);
   DoTestSingleSidePacket<bool>("{ bool_value: true }", true);
   DoTestSingleSidePacket<std::string>(R"({ string_value: "str" })", "str");
+  DoTestSingleSidePacket<int64_t>("{ int64_value: 63 }", 63);
 }
 
 TEST(ConstantSidePacketCalculatorTest, MultiplePackets) {
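For reference, a minimal sketch of how a graph would supply the new int64 constant as a side packet; this config is illustrative (the side-packet name "int64_packet" is invented here, and the snippet assumes the usual parse_text_proto helpers), not part of the patch:

// Hypothetical usage of the new int64_value option; mirrors what the
// DoTestSingleSidePacket<int64_t> case above exercises.
CalculatorGraphConfig config =
    ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
      node {
        calculator: "ConstantSidePacketCalculator"
        output_side_packet: "PACKET:int64_packet"
        options: {
          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
            packet { int64_value: 63 }
          }
        }
      }
    )pb");

Downstream nodes can then consume "int64_packet" as an input side packet of type int64_t.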
From de9acdfa6847443b1f17bf99fe37cfb52af2ddd7 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 13 Jun 2023 22:17:41 +0530
Subject: [PATCH 048/106] Added iOS segmentation mask

---
 mediapipe/tasks/ios/vision/core/BUILD        |  16 ++
 .../tasks/ios/vision/core/sources/MPPMask.h  | 107 ++++++++++++
 .../tasks/ios/vision/core/sources/MPPMask.mm | 157 ++++++++++++++++++
 3 files changed, 280 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/core/sources/MPPMask.h
 create mode 100644 mediapipe/tasks/ios/vision/core/sources/MPPMask.mm

diff --git a/mediapipe/tasks/ios/vision/core/BUILD b/mediapipe/tasks/ios/vision/core/BUILD
index a97410e1a..7efa1e7e8 100644
--- a/mediapipe/tasks/ios/vision/core/BUILD
+++ b/mediapipe/tasks/ios/vision/core/BUILD
@@ -64,3 +64,19 @@ objc_library(
         "@com_google_absl//absl/status:statusor",
     ],
 )
+
+objc_library(
+    name = "MPPMask",
+    srcs = ["sources/MPPMask.mm"],
+    hdrs = ["sources/MPPMask.h"],
+    copts = [
+        "-ObjC++",
+        "-std=c++17",
+    ],
+    deps = [
+        "//mediapipe/tasks/ios/common:MPPCommon",
+        "//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
+        "//mediapipe/tasks/ios/core:MPPTaskRunner",
+        "//third_party/apple_frameworks:CoreVideo",
+    ],
+)
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
new file mode 100644
index 000000000..37f253c63
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -0,0 +1,107 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <CoreVideo/CoreVideo.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** The underlying type of the segmentation mask. */
+typedef NS_ENUM(NSUInteger, MPPMaskDataType) {
+
+  /** Represents the native `UInt8 *` type. */
+  MPPMaskDataTypeUInt8,
+
+  /** Represents the native `float *` type. */
+  MPPMaskDataTypeFloat32,
+
+} NS_SWIFT_NAME(MaskDataType);
+
+/**
+ * The wrapper class for MediaPipe segmentation masks.
+ *
+ * Masks are stored as `UInt8 *` or `float *` objects.
+ * Every mask has an underlying type which can be accessed using `dataType`. You can access the
+ * mask as any other type using the appropriate properties. For example, if the underlying type is
+ * `MPPMaskDataTypeUInt8`, in addition to accessing the mask using `uint8Data`, you can access
+ * `float32Data` to get the float 32 data. The first time you access the data as a type different
+ * from the underlying type, an expensive type conversion is performed. Subsequent accesses return
+ * a pointer to the memory location of the same type-converted array. As type conversions can be
+ * expensive, it is recommended to limit the accesses to data of types different from the
+ * underlying type.
+ *
+ * Masks returned from a MediaPipe Task are owned by the underlying C++ task. If you need to
+ * extend the lifetime of these objects, you can invoke the `[MPPMask copy:]` method.
+ */
+NS_SWIFT_NAME(Mask)
+@interface MPPMask : NSObject <NSCopying>
+
+/** The width of the mask. */
+@property(nonatomic, readonly) CGFloat width;
+
+/** The height of the mask. */
+@property(nonatomic, readonly) CGFloat height;
+
+/** The data type of the mask. */
+@property(nonatomic, readonly) MPPMaskDataType dataType;
+
+/**
+ * The pointer to the memory location where the underlying mask as a single channel `UInt8` array
+ * is stored.
+ */
+@property(nonatomic, readonly, assign) const UInt8 *uint8Data;
+
+/**
+ * The pointer to the memory location where the underlying mask as a single channel float 32 array
+ * is stored.
+ */
+@property(nonatomic, readonly, assign) const float *float32Data;
+
+/**
+ * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data,
+ * width and height.
+ *
+ * @param uint8Data A pointer to the memory location of the `UInt8` data array.
+ * @param width The width of the mask.
+ * @param height The height of the mask.
+ *
+ * @return A new `MPPMask` instance with the given `UInt8*` data, width and height.
+ */
+- (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data
+                                     width:(NSInteger)width
+                                    height:(NSInteger)height NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data,
+ * width and height.
+ *
+ * @param float32Data A pointer to the memory location of the `float` data array.
+ * @param width The width of the mask.
+ * @param height The height of the mask.
+ *
+ * @return A new `MPPMask` instance with the given `float*` data, width and height.
+ */
- (nullable instancetype)initWithFloat32Data:(const float *)float32Data
+                                       width:(NSInteger)width
+                                      height:(NSInteger)height
+                                       error:(NSError **)error NS_DESIGNATED_INITIALIZER;
+
+/** Unavailable. */
+- (instancetype)init NS_UNAVAILABLE;
+
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm
new file mode 100644
index 000000000..cc6332676
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm
@@ -0,0 +1,157 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h"
+#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
+#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h"
+
+namespace {
+template <typename T>
+T *allocateDataPtr(std::unique_ptr<T[]> &data, size_t length) {
+  data = std::unique_ptr<T[]>(new T[length]);
+  return data.get();
+}
+
+template <typename T>
+void copyData(const T *destination, const T *source, size_t length) {
+  memcpy((void *)destination, source, length * sizeof(T));
+}
+}  // namespace
+
+@interface MPPMask () {
+  const UInt8 *_uint8Data;
+  const float *_float32Data;
+  std::unique_ptr<UInt8[]> _allocatedUInt8Data;
+  std::unique_ptr<float[]> _allocatedFloat32Data;
+}
+@end
+
+@implementation MPPMask
+
+- (nullable instancetype)initWithWidth:(NSInteger)width
+                                height:(NSInteger)height
+                              dataType:(MPPMaskDataType)dataType
+                                 error:(NSError **)error {
+  if (dataType < MPPMaskDataTypeUInt8 || dataType > MPPMaskDataTypeFloat32) {
+    [MPPCommonUtils createCustomError:error
+                             withCode:MPPTasksErrorCodeInvalidArgumentError
+                          description:@"Invalid value for data type."];
+    return nil;
+  }
+
+  self = [super init];
+  if (self) {
+    _width = width;
+    _height = height;
+    _dataType = dataType;
+  }
+  return self;
+}
+
+- (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data
+                                     width:(NSInteger)width
+                                    height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil];
+  if (self) {
+    _uint8Data = uint8Data;
+  }
+  return self;
+}
+
+- (nullable instancetype)initWithFloat32Data:(const float *)float32Data
+                                       width:(NSInteger)width
+                                      height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil];
+  if (self) {
+    _float32Data = float32Data;
+  }
+  return self;
+}
+
+- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy
+                                  width:(NSInteger)width
+                                 height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil];
+  if (self) {
+    _uint8Data = allocateDataPtr(_allocatedUInt8Data, _width * _height);
+    copyData(_uint8Data, uint8DataToCopy, _width * _height);
+  }
+  return self;
+}
+
+- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy
+                                    width:(NSInteger)width
+                                   height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil];
+  if (self) {
+    _float32Data = allocateDataPtr(_allocatedFloat32Data, _width * _height);
+    copyData(_float32Data, float32DataToCopy, _width * _height);
+  }
+  return self;
+}
+
+- (const UInt8 *)uint8Data {
+  switch (_dataType) {
+    case MPPMaskDataTypeUInt8: {
+      return _uint8Data;
+    }
+    case MPPMaskDataTypeFloat32: {
+      if (_allocatedUInt8Data) {
+        return _allocatedUInt8Data.get();
+      }
+      UInt8 *data = allocateDataPtr(_allocatedUInt8Data, _width * _height);
+      for (int i = 0; i < _width * _height; i++) {
+        data[i] = _float32Data[i] * 255;
+      }
+      return data;
+    }
+    default:
+      return NULL;
+  }
+}
+
+- (const float *)float32Data {
+  switch (_dataType) {
+    case MPPMaskDataTypeUInt8: {
+      if (_allocatedFloat32Data) {
+        return _allocatedFloat32Data.get();
+      }
+      float *data = allocateDataPtr(_allocatedFloat32Data, _width * _height);
+      for (int i = 0; i < _width * _height; i++) {
+        // Use float division; integer division by 255 would truncate to 0/1.
+        data[i] = _uint8Data[i] / 255.0f;
+      }
+      return data;
+    }
+    case MPPMaskDataTypeFloat32: {
+      return _float32Data;
+    }
+    default:
+      return NULL;
+  }
+}
+
+- (id)copyWithZone:(NSZone *)zone {
+  switch (_dataType) {
+    case MPPMaskDataTypeUInt8:
+      return [[MPPMask alloc]
+          initWithUInt8DataToCopy:self.uint8Data
+                            width:self.width
+                           height:self.height];
+    case MPPMaskDataTypeFloat32:
+      return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data
+                                                   width:self.width
+                                                  height:self.height];
+  }
+}
+
+@end

From 5e2bb0e1dbe34fb7ba5018f688cb853e0a2bbaa4 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 13 Jun 2023 22:19:40 +0530
Subject: [PATCH 049/106] Updated documentation of MPPMask

---
 mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
index 37f253c63..4f16b99bd 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -97,6 +97,10 @@ NS_SWIFT_NAME(Mask)
                                       height:(NSInteger)height
                                        error:(NSError **)error NS_DESIGNATED_INITIALIZER;
 
+
+// TODO: Add methods for CVPixelBuffer conversion.
+
+
 /** Unavailable. */
 - (instancetype)init NS_UNAVAILABLE;
 
 + (instancetype)new NS_UNAVAILABLE;

From e468bee58419e741d1ab4b4b2d4c47d41fa911ae Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 13 Jun 2023 09:51:42 -0700
Subject: [PATCH 050/106] Deprecate GraphStatus()

PiperOrigin-RevId: 539992850
---
 mediapipe/framework/calculator_context.h | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/mediapipe/framework/calculator_context.h b/mediapipe/framework/calculator_context.h
index 284226d92..9568ba745 100644
--- a/mediapipe/framework/calculator_context.h
+++ b/mediapipe/framework/calculator_context.h
@@ -109,9 +109,20 @@ class CalculatorContext {
   // use OutputStream::SetOffset() directly.
   void SetOffset(TimestampDiff offset);
 
-  // Returns the status of the graph run.
+  // DEPRECATED: This was intended to get the graph run status during a
+  // `CalculatorBase::Close` call. However, `Close` can run simultaneously with
+  // other calculators' `CalculatorBase::Process`, hence the actual graph
+  // status may change at any time and the graph status returned here does not
+  // necessarily reflect the actual graph status.
   //
-  // NOTE: This method should only be called during CalculatorBase::Close().
+  // As an alternative, instead of checking the graph status in `Close` and
+  // doing work for the "done" state, you can enable timestamp bound processing
+  // for your calculator (`CalculatorContract::SetProcessTimestampBounds`) to
+  // trigger `Process` on timestamp bound updates and handle the "done" state
+  // there. Check examples in:
+  // mediapipe/framework/calculator_graph_summary_packet_test.cc.
+  //
+  ABSL_DEPRECATED("Does not reflect the actual graph status.")
   absl::Status GraphStatus() const { return graph_status_; }
 
   ProfilingContext* GetProfilingContext() const {
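A sketch of the migration path the deprecation note describes: instead of reading GraphStatus() in Close(), a calculator can opt into timestamp bound processing and detect the "done" bound inside Process(). The calculator below is hypothetical (name and ports invented here) and simply mirrors the contract settings used by SummaryPacketCalculator in the test referenced above:

class DoneAwareCalculator : public mediapipe::api2::Node {
 public:
  static constexpr mediapipe::api2::Input<int> kIn{"IN"};
  static constexpr mediapipe::api2::Output<int> kOut{"OUT"};
  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);

  static absl::Status UpdateContract(CalculatorContract* cc) {
    // Avoid automatic timestamp bound propagation on Process calls.
    cc->SetTimestampOffset(TimestampDiff::Unset());
    // Per the test above, only ImmediateInputStreamHandler reports the
    // "done" bound today.
    cc->SetInputStreamHandler("ImmediateInputStreamHandler");
    // Receive Process() invocations for timestamp bound updates.
    cc->SetProcessTimestampBounds(true);
    return absl::OkStatus();
  }

  absl::Status Process(CalculatorContext* cc) final {
    if (!cc->InputTimestamp().HasNextAllowedInStream()) {
      // All inputs are done: do the work that used to key off GraphStatus()
      // in Close().
    }
    return absl::OkStatus();
  }
};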
From 2cdb291e544905d85a5a50c085316dd46a53a58e Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 13 Jun 2023 22:28:09 +0530
Subject: [PATCH 051/106] Updated data types of width and height

---
 mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
index 4f16b99bd..c8064cb8d 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -48,10 +48,10 @@ NS_SWIFT_NAME(Mask)
 @interface MPPMask : NSObject <NSCopying>
 
 /** The width of the mask. */
-@property(nonatomic, readonly) CGFloat width;
+@property(nonatomic, readonly) NSInteger width;
 
 /** The height of the mask. */
-@property(nonatomic, readonly) CGFloat height;
+@property(nonatomic, readonly) NSInteger height;
 
 /** The data type of the mask. */
 @property(nonatomic, readonly) MPPMaskDataType dataType;

From 02d55dfb0a2a4475effb4d4d3dbd280ee4a5dc0f Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 13 Jun 2023 22:29:15 +0530
Subject: [PATCH 052/106] Removed core video import

---
 mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 1 -
 1 file changed, 1 deletion(-)

diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
index c8064cb8d..1e70ef452 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-#import <CoreVideo/CoreVideo.h>
 #import <Foundation/Foundation.h>
 
 NS_ASSUME_NONNULL_BEGIN

From b97d11fa76c88cb15b2e336b4c827821a09783c2 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 13 Jun 2023 15:00:40 -0700
Subject: [PATCH 053/106] Internal MediaPipe Tasks change

PiperOrigin-RevId: 540083633
---
 mediapipe/calculators/tensor/BUILD                   |  2 --
 .../tensor/bert_preprocessor_calculator.cc           |  4 ++--
 .../tensor/inference_interpreter_delegate_runner.cc  | 13 +++++++++++++
 .../tensor/regex_preprocessor_calculator.cc          |  3 +--
 mediapipe/framework/formats/tensor.h                 |  7 +++++++
 mediapipe/framework/formats/tensor_test.cc           | 12 ++++++++++++
 6 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD
index 2ad98f28d..a3e61c063 100644
--- a/mediapipe/calculators/tensor/BUILD
+++ b/mediapipe/calculators/tensor/BUILD
@@ -228,7 +228,6 @@ cc_library(
         "//mediapipe/tasks/metadata:metadata_schema_cc",
         "@com_google_absl//absl/container:flat_hash_set",
         "@com_google_absl//absl/status",
-        "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
     ],
     alwayslink = 1,
@@ -280,7 +279,6 @@ cc_library(
         "//mediapipe/tasks/cc/text/tokenizers:tokenizer_utils",
         "//mediapipe/tasks/metadata:metadata_schema_cc",
         "@com_google_absl//absl/status",
-        "@com_google_absl//absl/status:statusor",
     ],
     alwayslink = 1,
 )
diff --git a/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc b/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc
index b56122805..12db1493c 100644
--- a/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc
+++ b/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc
@@ -22,7 +22,6 @@
 
 #include "absl/container/flat_hash_set.h"
 #include "absl/status/status.h"
-#include "absl/status/statusor.h"
 #include "absl/strings/ascii.h"
 #include "absl/strings/string_view.h"
 #include "absl/strings/substitute.h"
@@ -244,7 +243,8 @@ std::vector<Tensor> BertPreprocessorCalculator::GenerateInputTensors(
   input_tensors.reserve(kNumInputTensorsForBert);
   for (int i = 0; i < kNumInputTensorsForBert; ++i) {
     input_tensors.push_back(
-        {Tensor::ElementType::kInt32, Tensor::Shape({tensor_size})});
+        {Tensor::ElementType::kInt32,
+         Tensor::Shape({1, tensor_size}, has_dynamic_input_tensors_)});
   }
   std::memcpy(input_tensors[input_ids_tensor_index_]
                   .GetCpuWriteView()
diff --git a/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc b/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc
index a2b8a9285..b727f179d 100644
--- a/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc
+++ b/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc
@@ -96,6 +96,19 @@ absl::StatusOr<std::vector<Tensor>>
 InferenceInterpreterDelegateRunner::Run(
     CalculatorContext* cc, const std::vector<Tensor>& input_tensors) {
   // Read CPU input into tensors.
   RET_CHECK_EQ(interpreter_->inputs().size(), input_tensors.size());
+
+  // If the input tensors have dynamic shape, then the tensors need to be
+  // resized and reallocated before we can copy the tensor values.
+  bool resized_tensor_shapes = false;
+  for (int i = 0; i < input_tensors.size(); ++i) {
+    if (input_tensors[i].shape().is_dynamic) {
+      interpreter_->ResizeInputTensorStrict(i, input_tensors[i].shape().dims);
+      resized_tensor_shapes = true;
+    }
+  }
+  // Reallocation is needed for memory sanity.
+  if (resized_tensor_shapes) interpreter_->AllocateTensors();
+
   for (int i = 0; i < input_tensors.size(); ++i) {
     const TfLiteType input_tensor_type =
         interpreter_->tensor(interpreter_->inputs()[i])->type;
diff --git a/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc b/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc
index 92a5f0266..8276462ff 100644
--- a/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc
+++ b/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc
@@ -20,7 +20,6 @@
 #include <memory>
 
 #include "absl/status/status.h"
-#include "absl/status/statusor.h"
 #include "mediapipe/calculators/tensor/regex_preprocessor_calculator.pb.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/api2/port.h"
@@ -161,7 +160,7 @@ absl::Status RegexPreprocessorCalculator::Process(CalculatorContext* cc) {
   // not found in the tokenizer vocab.
   std::vector<Tensor> result;
   result.push_back(
-      {Tensor::ElementType::kInt32, Tensor::Shape({max_seq_len_})});
+      {Tensor::ElementType::kInt32, Tensor::Shape({1, max_seq_len_})});
   std::memcpy(result[0].GetCpuWriteView().buffer<int32_t>(),
               input_tokens.data(), input_tokens.size() * sizeof(int32_t));
   kTensorsOut(cc).Send(std::move(result));
diff --git a/mediapipe/framework/formats/tensor.h b/mediapipe/framework/formats/tensor.h
index 1d670d805..4f95eb27b 100644
--- a/mediapipe/framework/formats/tensor.h
+++ b/mediapipe/framework/formats/tensor.h
@@ -117,11 +117,18 @@ class Tensor {
     Shape() = default;
     Shape(std::initializer_list<int> dimensions) : dims(dimensions) {}
     Shape(const std::vector<int>& dimensions) : dims(dimensions) {}
+    Shape(std::initializer_list<int> dimensions, bool is_dynamic)
+        : dims(dimensions), is_dynamic(is_dynamic) {}
+    Shape(const std::vector<int>& dimensions, bool is_dynamic)
+        : dims(dimensions), is_dynamic(is_dynamic) {}
     int num_elements() const {
       return std::accumulate(dims.begin(), dims.end(), 1,
                              std::multiplies<int>());
     }
     std::vector<int> dims;
+    // The Tensor has dynamic rather than static shape so the TFLite
+    // interpreter needs to be reallocated. Only relevant for CPU.
+    bool is_dynamic = false;
   };
   // Quantization parameters corresponding to the zero_point and scale value
   // made available by TfLite quantized (uint8/int8) tensors.
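To make the new flag concrete, here is a sketch of how a producer would mark a CPU tensor as dynamically shaped so that the runner above resizes and reallocates the interpreter before copying data in; the helper name and sequence length are illustrative, not from the patch:

#include <cstdint>
#include <cstring>
#include "mediapipe/framework/formats/tensor.h"

// A 1 x seq_len int32 tensor whose length may differ from packet to packet.
// is_dynamic = true is what triggers ResizeInputTensorStrict() followed by
// AllocateTensors() in InferenceInterpreterDelegateRunner::Run.
mediapipe::Tensor MakeDynamicTokenTensor(int seq_len) {
  mediapipe::Tensor tensor(
      mediapipe::Tensor::ElementType::kInt32,
      mediapipe::Tensor::Shape({1, seq_len}, /*is_dynamic=*/true));
  auto view = tensor.GetCpuWriteView();
  // Zero-fill as a placeholder for real token ids.
  std::memset(view.buffer<int32_t>(), 0, seq_len * sizeof(int32_t));
  return tensor;
}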
diff --git a/mediapipe/framework/formats/tensor_test.cc b/mediapipe/framework/formats/tensor_test.cc
index 4ad4e18eb..468af4ab9 100644
--- a/mediapipe/framework/formats/tensor_test.cc
+++ b/mediapipe/framework/formats/tensor_test.cc
@@ -2,6 +2,7 @@
 
 #include <cstdint>
 #include <utility>
+#include <vector>
 
 #include "mediapipe/framework/port/gmock.h"
 #include "mediapipe/framework/port/gtest.h"
@@ -34,6 +35,17 @@ TEST(General, TestDataTypes) {
   EXPECT_EQ(t_bool.bytes(), t_bool.shape().num_elements() * sizeof(bool));
 }
 
+TEST(General, TestDynamic) {
+  Tensor t1(Tensor::ElementType::kFloat32, Tensor::Shape({1, 2, 3, 4}, true));
+  EXPECT_EQ(t1.shape().num_elements(), 1 * 2 * 3 * 4);
+  EXPECT_TRUE(t1.shape().is_dynamic);
+
+  std::vector<int> t2_dims = {4, 3, 2, 3};
+  Tensor t2(Tensor::ElementType::kFloat16, Tensor::Shape(t2_dims, true));
+  EXPECT_EQ(t2.shape().num_elements(), 4 * 3 * 2 * 3);
+  EXPECT_TRUE(t2.shape().is_dynamic);
+}
+
 TEST(Cpu, TestMemoryAllocation) {
   Tensor t1(Tensor::ElementType::kFloat32, Tensor::Shape{4, 3, 2, 3});
   auto v1 = t1.GetCpuWriteView();

From 02d55dfb0a2a4475effb4d4d3dbd280ee4a5dc0f Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 13 Jun 2023 16:18:27 -0700
Subject: [PATCH 054/106] Modify the TensorToImageFrameCalculator to support
 normalized outputs.

PiperOrigin-RevId: 540104988
---
 mediapipe/calculators/tensorflow/BUILD         |  1 +
 .../tensor_to_image_frame_calculator.cc        | 31 ++++++++++--
 .../tensor_to_image_frame_calculator.proto     |  4 ++
 .../tensor_to_image_frame_calculator_test.cc   | 50 ++++++++++++++++++-
 4 files changed, 82 insertions(+), 4 deletions(-)

diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD
index c4b9ab9f2..feee2372a 100644
--- a/mediapipe/calculators/tensorflow/BUILD
+++ b/mediapipe/calculators/tensorflow/BUILD
@@ -1077,6 +1077,7 @@ cc_test(
     linkstatic = 1,
     deps = [
         ":tensor_to_image_frame_calculator",
+        ":tensor_to_image_frame_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:calculator_runner",
         "//mediapipe/framework/formats:image_frame",
diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc
index 34e397b32..b5a94e014 100644
--- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc
+++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc
@@ -65,6 +65,7 @@ class TensorToImageFrameCalculator : public CalculatorBase {
 
  private:
   float scale_factor_;
+  bool scale_per_frame_min_max_;
 };
 
 REGISTER_CALCULATOR(TensorToImageFrameCalculator);
@@ -88,6 +89,8 @@ absl::Status TensorToImageFrameCalculator::GetContract(CalculatorContract* cc) {
 
 absl::Status TensorToImageFrameCalculator::Open(CalculatorContext* cc) {
   scale_factor_ = cc->Options<TensorToImageFrameCalculatorOptions>().scale_factor();
+  scale_per_frame_min_max_ = cc->Options<TensorToImageFrameCalculatorOptions>()
+                                 .scale_per_frame_min_max();
   cc->SetOffset(TimestampDiff(0));
   return absl::OkStatus();
 }
@@ -109,16 +112,38 @@ absl::Status TensorToImageFrameCalculator::Process(CalculatorContext* cc) {
   auto format = (depth == 3 ? ImageFormat::SRGB : ImageFormat::GRAY8);
   const int32_t total_size = height * width * depth;
 
+  if (scale_per_frame_min_max_) {
+    RET_CHECK_EQ(input_tensor.dtype(), tensorflow::DT_FLOAT)
+        << "Setting scale_per_frame_min_max requires FLOAT input tensors.";
+  }
   ::std::unique_ptr<ImageFrame> output;
   if (input_tensor.dtype() == tensorflow::DT_FLOAT) {
     // Allocate buffer with alignments.
std::unique_ptr<uint8_t[]> buffer( new (std::align_val_t(EIGEN_MAX_ALIGN_BYTES)) uint8_t[total_size]); auto data = input_tensor.flat<float>().data(); + float min = 1e23; + float max = -1e23; + if (scale_per_frame_min_max_) { + for (int i = 0; i < total_size; ++i) { + float d = scale_factor_ * data[i]; + if (d < min) { + min = d; + } + if (d > max) { + max = d; + } + } + } for (int i = 0; i < total_size; ++i) { - float d = scale_factor_ * data[i]; - if (d < 0) d = 0; - if (d > 255) d = 255; + float d = data[i]; + if (scale_per_frame_min_max_) { + d = 255 * (d - min) / (max - min + 1e-9); + } else { + d = scale_factor_ * d; + if (d < 0) d = 0; + if (d > 255) d = 255; + } buffer[i] = d; } output = ::absl::make_unique<ImageFrame>( diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto index 3410068d0..c60448c16 100644 --- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto +++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto @@ -26,4 +26,8 @@ message TensorToImageFrameCalculatorOptions { // Multiplies floating point tensor outputs by this value before converting to // uint8. This is useful for converting from range [0, 1] to [0, 255]. optional float scale_factor = 1 [default = 1.0]; + + // If true, linearly scales each frame's FLOAT tensor values from their + // per-frame [min, max] range to [0, 255]. This overrides any explicit + // scale_factor. + optional bool scale_per_frame_min_max = 2 [default = false]; } diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc index aee9fee9b..13255ac4e 100644 --- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc @@ -11,7 +11,9 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +#include <type_traits> +#include "mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" #include "mediapipe/framework/formats/image_frame.h" @@ -32,11 +34,14 @@ constexpr char kImage[] = "IMAGE"; template <typename T> class TensorToImageFrameCalculatorTest : public ::testing::Test { protected: - void SetUpRunner() { + void SetUpRunner(bool scale_per_frame_min_max = false) { CalculatorGraphConfig::Node config; config.set_calculator("TensorToImageFrameCalculator"); config.add_input_stream("TENSOR:input_tensor"); config.add_output_stream("IMAGE:output_image"); + config.mutable_options() + ->MutableExtension(mediapipe::TensorToImageFrameCalculatorOptions::ext) + ->set_scale_per_frame_min_max(scale_per_frame_min_max); runner_ = absl::make_unique<CalculatorRunner>(config); } @@ -157,4 +162,47 @@ TYPED_TEST(TensorToImageFrameCalculatorTest, } } +TYPED_TEST(TensorToImageFrameCalculatorTest, + Converts3DTensorToImageFrame2DGrayWithScaling) { + this->SetUpRunner(true); + auto& runner = this->runner_; + constexpr int kWidth = 16; + constexpr int kHeight = 8; + const tf::TensorShape tensor_shape{kHeight, kWidth}; + auto tensor = absl::make_unique<tf::Tensor>( + tf::DataTypeToEnum<TypeParam>::v(), tensor_shape); + auto tensor_vec = tensor->template flat<TypeParam>().data(); + + // Writing a sequence of integers as floats which we want normalized.
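+ // With one pixel at 255 and the rest at 200, the per-frame range is + // [200, 255], so after normalization the single maximum maps to 255 and + // every other pixel maps to 0, which is what the assertions below expect.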
+ tensor_vec[0] = 255; + for (int i = 1; i < kWidth * kHeight; ++i) { + tensor_vec[i] = 200; + } + + const int64_t time = 1234; + runner->MutableInputs()->Tag(kTensor).packets.push_back( + Adopt(tensor.release()).At(Timestamp(time))); + + if (!std::is_same::value) { + EXPECT_FALSE(runner->Run().ok()); + return; // Short circuit because does not apply to other types. + } else { + EXPECT_TRUE(runner->Run().ok()); + const std::vector& output_packets = + runner->Outputs().Tag(kImage).packets; + EXPECT_EQ(1, output_packets.size()); + EXPECT_EQ(time, output_packets[0].Timestamp().Value()); + const ImageFrame& output_image = output_packets[0].Get(); + EXPECT_EQ(ImageFormat::GRAY8, output_image.Format()); + EXPECT_EQ(kWidth, output_image.Width()); + EXPECT_EQ(kHeight, output_image.Height()); + + EXPECT_EQ(255, output_image.PixelData()[0]); + for (int i = 1; i < kWidth * kHeight; ++i) { + const uint8_t pixel_value = output_image.PixelData()[i]; + ASSERT_EQ(0, pixel_value); + } + } +} + } // namespace mediapipe From 3742bc8c1b9f1325c4588f029c826495d109e25c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 13 Jun 2023 17:10:11 -0700 Subject: [PATCH 055/106] Add metadata for all PREFIX/image... prefixes. PiperOrigin-RevId: 540117214 --- mediapipe/util/sequence/media_sequence.cc | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/mediapipe/util/sequence/media_sequence.cc b/mediapipe/util/sequence/media_sequence.cc index 287db6181..21d030fff 100644 --- a/mediapipe/util/sequence/media_sequence.cc +++ b/mediapipe/util/sequence/media_sequence.cc @@ -147,6 +147,22 @@ absl::Status ReconcileMetadataImages(const std::string& prefix, return absl::OkStatus(); } +// Reconciles metadata for all images. +absl::Status ReconcileMetadataImages(tensorflow::SequenceExample* sequence) { + RET_CHECK_OK(ReconcileMetadataImages("", sequence)); + for (const auto& key_value : sequence->feature_lists().feature_list()) { + const auto& key = key_value.first; + if (::absl::StrContains(key, kImageTimestampKey)) { + std::string prefix = ""; + if (key != kImageTimestampKey) { + prefix = key.substr(0, key.size() - sizeof(kImageTimestampKey)); + } + RET_CHECK_OK(ReconcileMetadataImages(prefix, sequence)); + } + } + return absl::OkStatus(); +} + // Sets the values of "feature/${TAG}/dimensions", and // "feature/${TAG}/frame_rate" for each float list feature TAG. 
If the // dimensions are already present as a context feature, this method verifies @@ -545,10 +561,7 @@ absl::Status ReconcileMetadata(bool reconcile_bbox_annotations, bool reconcile_region_annotations, tensorflow::SequenceExample* sequence) { RET_CHECK_OK(ReconcileAnnotationIndicesByImageTimestamps(sequence)); - RET_CHECK_OK(ReconcileMetadataImages("", sequence)); - RET_CHECK_OK(ReconcileMetadataImages(kForwardFlowPrefix, sequence)); - RET_CHECK_OK(ReconcileMetadataImages(kClassSegmentationPrefix, sequence)); - RET_CHECK_OK(ReconcileMetadataImages(kInstanceSegmentationPrefix, sequence)); + RET_CHECK_OK(ReconcileMetadataImages(sequence)); RET_CHECK_OK(ReconcileMetadataFeatureFloats(sequence)); if (reconcile_bbox_annotations) { RET_CHECK_OK(ReconcileMetadataBoxAnnotations("", sequence)); From eaeca82b76bdaca89c2411cbec97dd3a346e3c44 Mon Sep 17 00:00:00 2001 From: Yuqi Li Date: Tue, 13 Jun 2023 18:39:14 -0700 Subject: [PATCH 056/106] Internal change PiperOrigin-RevId: 540134258 --- mediapipe/util/cpu_util.cc | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/mediapipe/util/cpu_util.cc b/mediapipe/util/cpu_util.cc index 052eabb85..74e6debd5 100644 --- a/mediapipe/util/cpu_util.cc +++ b/mediapipe/util/cpu_util.cc @@ -26,7 +26,6 @@ #include #include "absl/algorithm/container.h" -#include "absl/flags/flag.h" #include "absl/strings/match.h" #include "absl/strings/numbers.h" #include "absl/strings/str_cat.h" @@ -35,23 +34,14 @@ #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/statusor.h" -ABSL_FLAG(std::string, system_cpu_max_freq_file, - "/sys/devices/system/cpu/cpu$0/cpufreq/cpuinfo_max_freq", - "The file pattern for CPU max frequencies, where $0 will be replaced " - "with the CPU id."); - namespace mediapipe { namespace { constexpr uint32_t kBufferLength = 64; absl::StatusOr GetFilePath(int cpu) { - if (!absl::StrContains(absl::GetFlag(FLAGS_system_cpu_max_freq_file), "$0")) { - return absl::InvalidArgumentError( - absl::StrCat("Invalid frequency file: ", - absl::GetFlag(FLAGS_system_cpu_max_freq_file))); - } - return absl::Substitute(absl::GetFlag(FLAGS_system_cpu_max_freq_file), cpu); + return absl::Substitute( + "/sys/devices/system/cpu/cpu$0/cpufreq/cpuinfo_max_freq", cpu); } absl::StatusOr GetCpuMaxFrequency(int cpu) { From 43e51c1094158f548650171c84f4b1f0bcabba20 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:34:32 +0530 Subject: [PATCH 057/106] Added live stream mode tests for iOS Hand Landmarker --- .../hand_landmarker/MPPHandLandmarkerTests.m | 290 +++++++++++++++++- 1 file changed, 287 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m index f9bdeb150..779bfde1f 100644 --- a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m @@ -58,7 +58,10 @@ static const float kLandmarksErrorTolerance = 0.03f; XCTAssertTrue(handLandmarkerResult.landmarks.count == 0); \ XCTAssertTrue(handLandmarkerResult.worldLandmarks.count == 0); -@interface MPPHandLandmarkerTests : XCTestCase +@interface MPPHandLandmarkerTests : XCTestCase { + NSDictionary *_liveStreamSucceedsTestDict; + NSDictionary *_outOfOrderTimestampTestDict; +} @end @implementation MPPHandLandmarkerTests @@ -153,7 +156,7 @@ static const float kLandmarksErrorTolerance = 0.03f; return filePath; } -#pragma 
mark Gesture Recognizer Initializers +#pragma mark Hand Landmarker Initializers - (MPPHandLandmarkerOptions *)handLandmarkerOptionsWithModelFileInfo: (ResourceFileInfo *)modelFileInfo { @@ -185,7 +188,7 @@ static const float kLandmarksErrorTolerance = 0.03f; AssertEqualErrors(error, expectedError); } -#pragma mark Assert Gesture Recognizer Results +#pragma mark Assert Hand Landmarker Results - (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPHandLandmarkerTests class] @@ -289,4 +292,285 @@ static const float kLandmarksErrorTolerance = 0.03f; } +#pragma mark Running Mode Tests + +- (void)testCreateHandLandmarkerFailsWithDelegateInNonLiveStreamMode { + MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; + for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + options.runningMode = runningModesToTest[i]; + options.handLandmarkerLiveStreamDelegate = self; + + [self assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError + errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in image or video mode. The " + @"delegate must not be set in the task's options." + }]]; + } +} + +- (void)testCreateHandLandmarkerFailsWithMissingDelegateInLiveStreamMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + options.runningMode = MPPRunningModeLiveStream; + + [self + assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in live stream mode. An " + @"object must be set as the delegate of the task " + @"in its options to ensure asynchronous delivery " + @"of results." + }]]; +} + +- (void)testDetectFailsWithCallingWrongApiInImageMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *liveStreamApiCallError; + XCTAssertFalse([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); + + NSError *expectedLiveStreamApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with live " + @"stream mode. Current Running Mode: Image" + }]; + + AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError); + + NSError *videoApiCallError; + XCTAssertFalse([handLandmarker detectInVideoFrame:image + timestampInMilliseconds:0 + error:&videoApiCallError]); + + NSError *expectedVideoApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"video mode. 
Current Running Mode: Image" + }]; + AssertEqualErrors(videoApiCallError, expectedVideoApiCallError); +} + +- (void)testDetectFailsWithCallingWrongApiInVideoMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeVideo; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *liveStreamApiCallError; + XCTAssertFalse([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); + + NSError *expectedLiveStreamApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with live " + @"stream mode. Current Running Mode: Video" + }]; + + AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError); + + NSError *imageApiCallError; + XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]); + + NSError *expectedImageApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"image mode. Current Running Mode: Video" + }]; + AssertEqualErrors(imageApiCallError, expectedImageApiCallError); +} + +- (void)testDetectFailsWithCallingWrongApiInLiveStreamMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.handLandmarkerLiveStreamDelegate = self; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *imageApiCallError; + XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]); + + NSError *expectedImageApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"image mode. Current Running Mode: Live Stream" + }]; + AssertEqualErrors(imageApiCallError, expectedImageApiCallError); + + NSError *videoApiCallError; + XCTAssertFalse([handLandmarker detectInVideoFrame:image + timestampInMilliseconds:0 + error:&videoApiCallError]); + + NSError *expectedVideoApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"video mode. 
Current Running Mode: Live Stream" + }]; + AssertEqualErrors(videoApiCallError, expectedVideoApiCallError); +} + +- (void)testDetectWithVideoModeSucceeds { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeVideo; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + for (int i = 0; i < 3; i++) { + MPPHandLandmarkerResult *handLandmarkerResult = + [handLandmarker detectInVideoFrame:image timestampInMilliseconds:i error:nil]; + [self assertHandLandmarkerResult:handLandmarkerResult + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests + thumbUpHandLandmarkerResult]]; + } +} + +- (void)testDetectWithOutOfOrderTimestampsAndLiveStreamModeFails { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.handLandmarkerLiveStreamDelegate = self; + + XCTestExpectation *expectation = [[XCTestExpectation alloc] + initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; + + expectation.expectedFulfillmentCount = 1; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + _outOfOrderTimestampTestDict = @{ + kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, + kLiveStreamTestsDictExpectationKey : expectation + }; + + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]); + + NSError *error; + XCTAssertFalse([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:0 + error:&error]); + + NSError *expectedError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing." + }]; + AssertEqualErrors(error, expectedError); + + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; +} + +- (void)testDetectWithLiveStreamModeSucceeds { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.handLandmarkerLiveStreamDelegate = self; + + NSInteger iterationCount = 100; + + // Because of flow limiting, we cannot ensure that the callback will be invoked `iterationCount` + // times. A normal expectation will fail if expectation.fulfill() is not called + // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will + // only succeed if the expectation is not fulfilled for the specified `expectedFulfillmentCount`. + // Since in our case we cannot predict how many times the expectation is supposed to be fulfilled, + // setting `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that the test succeeds if the expectation is fulfilled <= + // `iterationCount` times. 
+ XCTestExpectation *expectation = + [[XCTestExpectation alloc] initWithDescription:@"detectWithLiveStream"]; + + expectation.expectedFulfillmentCount = iterationCount + 1; + expectation.inverted = YES; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + _liveStreamSucceedsTestDict = @{ + kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, + kLiveStreamTestsDictExpectationKey : expectation + }; + + // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used + // with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type + // `CMSampleBuffer`. + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + for (int i = 0; i < iterationCount; i++) { + XCTAssertTrue([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:i + error:nil]); + } + + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; +} + +- (void)handLandmarker:(MPPHandLandmarker *)handLandmarker + didFinishRecognitionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError *)error { + [self assertHandLandmarkerResult:handLandmarkerResult + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests + thumbUpHandLandmarkerResult]]; + + if (handLandmarker == _outOfOrderTimestampTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { + [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } else if (handLandmarker == + _liveStreamSucceedsTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { + [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } + +} + @end From dffca9e3b54ebba127b870f1ee472925c107c3ee Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:51:06 +0530 Subject: [PATCH 058/106] Updated protobuf helper method name in iOS Gesture Recognizer Helpers --- .../gesture_recognizer/MPPGestureRecognizerTests.m | 13 ++++++------- .../MPPGestureRecognizerResult+ProtobufHelpers.h | 8 ++++---- .../MPPGestureRecognizerResult+ProtobufHelpers.mm | 8 ++++---- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index dcd5683f7..6bbcf9b10 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -98,18 +98,17 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [MPPGestureRecognizerTests filePathWithFileInfo:kExpectedThumbUpLandmarksFile]; return [MPPGestureRecognizerResult - gestureRecognizerResultsFromTextEncodedProtobufFileWithName:filePath - gestureLabel:kExpectedThumbUpLabel - shouldRemoveZPosition:YES]; + gestureRecognizerResultsFromProtobufFileWithName:filePath + gestureLabel:kExpectedThumbUpLabel + shouldRemoveZPosition:YES]; } + (MPPGestureRecognizerResult *)fistGestureRecognizerResultWithLabel:(NSString *)gestureLabel { NSString *filePath = [MPPGestureRecognizerTests filePathWithFileInfo:kExpectedFistLandmarksFile]; - return [MPPGestureRecognizerResult - gestureRecognizerResultsFromTextEncodedProtobufFileWithName:filePath - gestureLabel:gestureLabel - shouldRemoveZPosition:YES]; + return [MPPGestureRecognizerResult gestureRecognizerResultsFromProtobufFileWithName:filePath + 
gestureLabel:gestureLabel + shouldRemoveZPosition:YES]; } #pragma mark Assert Gesture Recognizer Results diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h index cfa0a5e53..069b90b99 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h @@ -1,4 +1,4 @@ -// Copyright 2022 The MediaPipe Authors. +// Copyright 2023 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -19,9 +19,9 @@ NS_ASSUME_NONNULL_BEGIN @interface MPPGestureRecognizerResult (ProtobufHelpers) + (MPPGestureRecognizerResult *) - gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName - gestureLabel:(NSString *)gestureLabel - shouldRemoveZPosition:(BOOL)removeZPosition; + gestureRecognizerResultsFromProtobufFileWithName:(NSString *)fileName + gestureLabel:(NSString *)gestureLabel + shouldRemoveZPosition:(BOOL)removeZPosition; @end diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm index f628499d5..28e5628ff 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm @@ -31,10 +31,10 @@ using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt; @implementation MPPGestureRecognizerResult (ProtobufHelpers) -+ (MPPGestureRecognizerResult *) - gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName - gestureLabel:(NSString *)gestureLabel - shouldRemoveZPosition:(BOOL)removeZPosition { ++ (MPPGestureRecognizerResult *) + gestureRecognizerResultsFromProtobufFileWithName:(NSString *)fileName + gestureLabel:(NSString *)gestureLabel + shouldRemoveZPosition:(BOOL)removeZPosition { LandmarksDetectionResultProto landmarkDetectionResultProto; if (!get_proto_from_pbtxt(fileName.cppString, landmarkDetectionResultProto).ok()) { From 0ae27fad373edff0e5602fc0204e50aba65d436a Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:51:41 +0530 Subject: [PATCH 059/106] Updated iOS hand landmarker tests --- .../vision/hand_landmarker/MPPHandLandmarkerTests.m | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m index 9f605411b..eec13d450 100644 --- a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m @@ -40,6 +40,9 @@ static ResourceFileInfo *const kExpectedPointingUpRotatedLandmarksFile = static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks"; static const float kLandmarksErrorTolerance = 0.03f; +static NSString *const kLiveStreamTestsDictHandLandmarkerKey = @"hand_landmarker"; +static NSString *const kLiveStreamTestsDictExpectationKey = 
@"expectation"; + #define AssertEqualErrors(error, expectedError) \ XCTAssertNotNil(error); \ XCTAssertEqualObjects(error.domain, expectedError.domain); \ @@ -57,7 +60,7 @@ static const float kLandmarksErrorTolerance = 0.03f; XCTAssertTrue(handLandmarkerResult.landmarks.count == 0); \ XCTAssertTrue(handLandmarkerResult.worldLandmarks.count == 0); -@interface MPPHandLandmarkerTests : XCTestCase { +@interface MPPHandLandmarkerTests : XCTestCase { NSDictionary *_liveStreamSucceedsTestDict; NSDictionary *_outOfOrderTimestampTestDict; } @@ -335,7 +338,7 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; - MPPImage *image = [self imageWithFileInfo:kFistImage]; + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image @@ -375,7 +378,7 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; - MPPImage *image = [self imageWithFileInfo:kFistImage]; + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image @@ -414,7 +417,7 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; - MPPImage *image = [self imageWithFileInfo:kFistImage]; + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *imageApiCallError; XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]); @@ -549,7 +552,7 @@ static const float kLandmarksErrorTolerance = 0.03f; } - (void)handLandmarker:(MPPHandLandmarker *)handLandmarker - didFinishRecognitionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult + didFinishDetectionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult timestampInMilliseconds:(NSInteger)timestampInMilliseconds error:(NSError *)error { [self assertHandLandmarkerResult:handLandmarkerResult From 94a9464750caba0a18f1d75818068d20bebaa602 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:52:26 +0530 Subject: [PATCH 060/106] Fixed formatting in MPPHandLandmarkerTests.m --- .../hand_landmarker/MPPHandLandmarkerTests.m | 94 ++++++++----------- 1 file changed, 40 insertions(+), 54 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m index eec13d450..36ad2ba9d 100644 --- a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m @@ -167,7 +167,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; - (MPPHandLandmarker *)createHandLandmarkerWithOptionsSucceeds: (MPPHandLandmarkerOptions *)handLandmarkerOptions { - NSError* error; + NSError *error; MPPHandLandmarker *handLandmarker = [[MPPHandLandmarker alloc] initWithOptions:handLandmarkerOptions error:&error]; XCTAssertNotNil(handLandmarker); @@ -298,10 +298,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; options.runningMode = runningModesToTest[i]; options.handLandmarkerLiveStreamDelegate = self; - [self assertCreateHandLandmarkerWithOptions:options - failsWithExpectedError: - [NSError - errorWithDomain:kExpectedErrorDomain + [self + 
assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain code:MPPTasksErrorCodeInvalidArgumentError userInfo:@{ NSLocalizedDescriptionKey : @@ -317,33 +317,31 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; options.runningMode = MPPRunningModeLiveStream; - [self - assertCreateHandLandmarkerWithOptions:options - failsWithExpectedError: - [NSError errorWithDomain:kExpectedErrorDomain - code:MPPTasksErrorCodeInvalidArgumentError - userInfo:@{ - NSLocalizedDescriptionKey : - @"The vision task is in live stream mode. An " - @"object must be set as the delegate of the task " - @"in its options to ensure asynchronous delivery " - @"of results." - }]]; + [self assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in live stream mode. An " + @"object must be set as the delegate of the task " + @"in its options to ensure asynchronous delivery " + @"of results." + }]]; } - (void)testDetectFailsWithCallingWrongApiInImageMode { MPPHandLandmarkerOptions *options = [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:0 - error:&liveStreamApiCallError]); + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); NSError *expectedLiveStreamApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -357,8 +355,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; NSError *videoApiCallError; XCTAssertFalse([handLandmarker detectInVideoFrame:image - timestampInMilliseconds:0 - error:&videoApiCallError]); + timestampInMilliseconds:0 + error:&videoApiCallError]); NSError *expectedVideoApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -375,15 +373,14 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; options.runningMode = MPPRunningModeVideo; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:0 - error:&liveStreamApiCallError]); + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); NSError *expectedLiveStreamApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -414,8 +411,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; options.runningMode = MPPRunningModeLiveStream; options.handLandmarkerLiveStreamDelegate = self; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; @@ -433,8 +429,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; 
NSError *videoApiCallError; XCTAssertFalse([handLandmarker detectInVideoFrame:image - timestampInMilliseconds:0 - error:&videoApiCallError]); + timestampInMilliseconds:0 + error:&videoApiCallError]); NSError *expectedVideoApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -451,17 +447,16 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; options.runningMode = MPPRunningModeVideo; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; for (int i = 0; i < 3; i++) { - MPPHandLandmarkerResult *handLandmarkerResult = - [handLandmarker detectInVideoFrame:image timestampInMilliseconds:i error:nil]; + MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInVideoFrame:image + timestampInMilliseconds:i + error:nil]; [self assertHandLandmarkerResult:handLandmarkerResult - isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests - thumbUpHandLandmarkerResult]]; + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests thumbUpHandLandmarkerResult]]; } } @@ -476,8 +471,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; expectation.expectedFulfillmentCount = 1; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; _outOfOrderTimestampTestDict = @{ kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, @@ -489,9 +483,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]); NSError *error; - XCTAssertFalse([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:0 - error:&error]); + XCTAssertFalse([handLandmarker detectAsyncInImage:image timestampInMilliseconds:0 error:&error]); NSError *expectedError = [NSError errorWithDomain:kExpectedErrorDomain @@ -528,8 +520,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; expectation.expectedFulfillmentCount = iterationCount + 1; expectation.inverted = YES; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; _liveStreamSucceedsTestDict = @{ kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, @@ -542,9 +533,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; for (int i = 0; i < iterationCount; i++) { - XCTAssertTrue([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:i - error:nil]); + XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:i error:nil]); } NSTimeInterval timeout = 0.5f; @@ -553,19 +542,16 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; - (void)handLandmarker:(MPPHandLandmarker *)handLandmarker didFinishDetectionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult - timestampInMilliseconds:(NSInteger)timestampInMilliseconds - error:(NSError *)error { + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError *)error { [self assertHandLandmarkerResult:handLandmarkerResult - 
isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests - thumbUpHandLandmarkerResult]]; + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests thumbUpHandLandmarkerResult]]; if (handLandmarker == _outOfOrderTimestampTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; - } else if (handLandmarker == - _liveStreamSucceedsTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { + } else if (handLandmarker == _liveStreamSucceedsTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; } - } @end From 9ed7acc0a3e68afa4acfdd01ff5d346d2a5867aa Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:59:54 +0530 Subject: [PATCH 061/106] Updated hand connections in iOS hand landmarker to class properties. --- .../sources/MPPHandLandmarker.h | 60 ++++++------------- 1 file changed, 18 insertions(+), 42 deletions(-) diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h index 5149ec0ac..5a954af46 100644 --- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h +++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h @@ -29,6 +29,24 @@ NS_ASSUME_NONNULL_BEGIN NS_SWIFT_NAME(HandLandmarker) @interface MPPHandLandmarker : NSObject +/** The array of connections between the landmarks in the palm. */ +@property(class, nonatomic, readonly) NSArray *handPalmConnections; + +/** The array of connections between the landmarks in the index finger. */ +@property(class, nonatomic, readonly) NSArray *handIndexFingerConnections; + +/** The array of connections between the landmarks in the middle finger. */ +@property(class, nonatomic, readonly) NSArray *handMiddleFingerConnections; + +/** The array of connections between the landmarks in the ring finger. */ +@property(class, nonatomic, readonly) NSArray *handRingFingerConnections; + +/** The array of connections between the landmarks in the pinky. */ +@property(class, nonatomic, readonly) NSArray *handPinkyConnections; + +/** The array of connections between all the landmarks in the hand. */ +@property(class, nonatomic, readonly) NSArray *handConnections; + /** * Creates a new instance of `MPPHandLandmarker` from an absolute path to a model asset bundle * stored locally on the device and the default `MPPHandLandmarkerOptions`. @@ -156,48 +174,6 @@ NS_SWIFT_NAME(HandLandmarker) - (instancetype)init NS_UNAVAILABLE; -/** - * Returns the connections between the landmarks in the palm. - * - * @return An array of connections between the landmarks in the palm. - */ -+ (NSArray *)handPalmConnections; - -/** - * Returns the connections between the landmarks in the index finger. - * - * @return An array of connections between the landmarks in the index finger. - */ -+ (NSArray *)handIndexFingerConnections; - -/** - * Returns the connections between the landmarks in the middle finger. - * - * @return An array of connections between the landmarks in the middle finger. - */ -+ (NSArray *)handMiddleFingerConnections; - -/** - * Returns the connections between the landmarks in the ring finger. - * - * @return An array of connections between the landmarks in the ring finger. - */ -+ (NSArray *)handRingFingerConnections; - -/** - * Returns the connections between the landmarks in the pinky. - * - * @return An array of connections between the landmarks in the pinky. 
- */ -+ (NSArray *)handPinkyConnections; - -/** - * Returns the connections between all the landmarks in the hand. - * - * @return An array of connections between all the landmarks in the hand. - */ -+ (NSArray *)handConnections; - + (instancetype)new NS_UNAVAILABLE; @end From 66a29bf37191de64f6470e3d49712a6a5d699688 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 11:18:30 -0700 Subject: [PATCH 062/106] Internal change PiperOrigin-RevId: 540327302 --- mediapipe/framework/tool/BUILD | 1 + mediapipe/framework/tool/template_parser.cc | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index 4ae0bb607..b7c563b92 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -530,6 +530,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ], diff --git a/mediapipe/framework/tool/template_parser.cc b/mediapipe/framework/tool/template_parser.cc index ad799c34f..743df9fb1 100644 --- a/mediapipe/framework/tool/template_parser.cc +++ b/mediapipe/framework/tool/template_parser.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/container/flat_hash_set.h" #include "absl/memory/memory.h" #include "absl/strings/ascii.h" #include "absl/strings/numbers.h" @@ -1430,10 +1431,10 @@ std::vector GetFields(const Message* src) { // Orders map entries in dst to match src. void OrderMapEntries(const Message* src, Message* dst, - std::set* seen = nullptr) { - std::unique_ptr> seen_owner; + absl::flat_hash_set* seen = nullptr) { + std::unique_ptr> seen_owner; if (!seen) { - seen_owner = std::make_unique>(); + seen_owner = std::make_unique>(); seen = seen_owner.get(); } if (seen->count(src) > 0) { From a1be5f3e72ea4cd2a0e9ab24405c8afc2cf6e80b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 11:31:12 -0700 Subject: [PATCH 063/106] Add a test case for "summary packet" to test failing upstream calculator PiperOrigin-RevId: 540331486 --- mediapipe/framework/BUILD | 1 + .../calculator_graph_summary_packet_test.cc | 117 ++++++++++++++++-- 2 files changed, 111 insertions(+), 7 deletions(-) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 86608285b..93e9475f3 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -1368,6 +1368,7 @@ cc_test( "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/stream_handler:immediate_input_stream_handler", "//mediapipe/framework/tool:sink", + "@com_google_absl//absl/status", ], ) diff --git a/mediapipe/framework/calculator_graph_summary_packet_test.cc b/mediapipe/framework/calculator_graph_summary_packet_test.cc index c8d1e7eb7..e6a04e060 100644 --- a/mediapipe/framework/calculator_graph_summary_packet_test.cc +++ b/mediapipe/framework/calculator_graph_summary_packet_test.cc @@ -1,3 +1,4 @@ +#include "absl/status/status.h" #include "mediapipe/framework/api2/node.h" #include "mediapipe/framework/api2/packet.h" #include "mediapipe/framework/api2/port.h" @@ -15,6 +16,7 @@ using ::mediapipe::api2::Node; using ::mediapipe::api2::Output; using ::testing::ElementsAre; using ::testing::Eq; +using ::testing::HasSubstr; using ::testing::IsEmpty; using ::testing::Value; @@ -57,6 +59,7 @@ class SummaryPacketCalculator : public Node { absl::Status Process(CalculatorContext* cc) final { if 
(!kIn(cc).IsEmpty()) { value_ += kIn(cc).Get(); + value_set_ = true; } if (kOut(cc).IsClosed()) { @@ -74,13 +77,19 @@ class SummaryPacketCalculator : public Node { // no next timestamp allowed in stream should always result in // InputStream::IsDone() == true. if (kIn(cc).IsDone() || !cc->InputTimestamp().HasNextAllowedInStream()) { - // kOut(cc).Send(value_) can be used here as well, however in the case of - // source calculator sending inputs into kIn the resulting timestamp is - // not well defined (e.g. it can be the last packet timestamp or - // Timestamp::Max()) - // TODO: last packet from source should always result in - // InputStream::IsDone() == true. - kOut(cc).Send(value_, Timestamp::Max()); + // `Process` may or may not be invoked for "done" timestamp bound when + // upstream calculator fails in `Close`. Hence, extra care is needed to + // identify whether the calculator needs to send output. + // TODO: remove when "done" timestamp bound flakiness fixed. + if (value_set_) { + // kOut(cc).Send(value_) can be used here as well, however in the case + // of source calculator sending inputs into kIn the resulting timestamp + // is not well defined (e.g. it can be the last packet timestamp or + // Timestamp::Max()) + // TODO: last packet from source should always result in + // InputStream::IsDone() == true. + kOut(cc).Send(value_, Timestamp::Max()); + } kOut(cc).Close(); } return absl::OkStatus(); @@ -88,6 +97,7 @@ class SummaryPacketCalculator : public Node { private: int value_ = 0; + bool value_set_ = false; }; MEDIAPIPE_REGISTER_NODE(SummaryPacketCalculator); @@ -323,5 +333,98 @@ TEST(SummaryPacketCalculatorUseCaseTest, EXPECT_THAT(output_packets, IsEmpty()); } +class FailureInCloseCalculator : public Node { + public: + static constexpr Input kIn{"IN"}; + static constexpr Output kOut{"INT"}; + + MEDIAPIPE_NODE_CONTRACT(kIn, kOut); + + absl::Status Process(CalculatorContext* cc) final { return absl::OkStatus(); } + + absl::Status Close(CalculatorContext* cc) final { + return absl::InternalError("error"); + } +}; +MEDIAPIPE_REGISTER_NODE(FailureInCloseCalculator); + +TEST(SummaryPacketCalculatorUseCaseTest, + DoesNotProduceSummaryPacketWhenUpstreamCalculatorFailsInClose) { + auto graph_config = ParseTextProtoOrDie(R"pb( + input_stream: "input" + node { + calculator: "FailureInCloseCalculator" + input_stream: "IN:input" + output_stream: "INT:int_value" + } + node { + calculator: "SummaryPacketCalculator" + input_stream: "IN:int_value" + output_stream: "SUMMARY:output" + } + )pb"); + std::vector output_packets; + tool::AddVectorSink("output", &graph_config, &output_packets); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(graph_config, {})); + MP_ASSERT_OK(graph.StartRun({})); + MP_ASSERT_OK(graph.WaitUntilIdle()); + EXPECT_THAT(output_packets, IsEmpty()); + + MP_ASSERT_OK(graph.CloseInputStream("input")); + EXPECT_THAT(graph.WaitUntilIdle(), + StatusIs(absl::StatusCode::kInternal, HasSubstr("error"))); + EXPECT_THAT(output_packets, IsEmpty()); +} + +class FailureInProcessCalculator : public Node { + public: + static constexpr Input kIn{"IN"}; + static constexpr Output kOut{"INT"}; + + MEDIAPIPE_NODE_CONTRACT(kIn, kOut); + + absl::Status Process(CalculatorContext* cc) final { + return absl::InternalError("error"); + } +}; +MEDIAPIPE_REGISTER_NODE(FailureInProcessCalculator); + +TEST(SummaryPacketCalculatorUseCaseTest, + DoesNotProduceSummaryPacketWhenUpstreamCalculatorFailsInProcess) { + auto graph_config = ParseTextProtoOrDie(R"pb( + input_stream: "input" + node { + 
calculator: "FailureInProcessCalculator" + input_stream: "IN:input" + output_stream: "INT:int_value" + } + node { + calculator: "SummaryPacketCalculator" + input_stream: "IN:int_value" + output_stream: "SUMMARY:output" + } + )pb"); + std::vector output_packets; + tool::AddVectorSink("output", &graph_config, &output_packets); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(graph_config, {})); + MP_ASSERT_OK(graph.StartRun({})); + MP_ASSERT_OK(graph.WaitUntilIdle()); + EXPECT_THAT(output_packets, IsEmpty()); + + auto send_packet = [&graph](int value, Timestamp timestamp) { + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input", MakePacket(value).At(timestamp))); + }; + + send_packet(10, Timestamp::PostStream()); + EXPECT_THAT(graph.WaitUntilIdle(), + StatusIs(absl::StatusCode::kInternal, HasSubstr("error"))); + EXPECT_THAT(output_packets, IsEmpty()); +} + } // namespace } // namespace mediapipe From 4776ecf40228589da01cea45d6362ab2e9c8bca4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 13:21:11 -0700 Subject: [PATCH 064/106] Internal change PiperOrigin-RevId: 540361672 --- mediapipe/tasks/cc/vision/hand_landmarker/BUILD | 2 ++ mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD | 2 ++ 2 files changed, 4 insertions(+) diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD index 2eecb61bf..f2afac494 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD @@ -153,6 +153,8 @@ cc_library( alwayslink = 1, ) +# TODO: open source hand joints graph + cc_library( name = "hand_landmarker_result", srcs = ["hand_landmarker_result.cc"], diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD index d13f0afd5..8097d7ab1 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD @@ -41,3 +41,5 @@ mediapipe_proto_library( "//mediapipe/tasks/cc/vision/hand_detector/proto:hand_detector_graph_options_proto", ], ) + +# TODO: open source hand joints graph From e02d70f8e594ae25390efb7d97514fc698927ea8 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 15:57:36 -0700 Subject: [PATCH 065/106] internal change PiperOrigin-RevId: 540404812 --- .../com/google/mediapipe/components/GlSurfaceViewRenderer.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java b/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java index 9321e82b4..591b6c987 100644 --- a/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java +++ b/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java @@ -34,6 +34,7 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import javax.annotation.Nullable; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; @@ -303,7 +304,7 @@ public class GlSurfaceViewRenderer implements GLSurfaceView.Renderer { } // Use this when the texture is not a SurfaceTexture. 
- public void setNextFrame(TextureFrame frame) { + public void setNextFrame(@Nullable TextureFrame frame) { if (surfaceTexture != null) { Matrix.setIdentityM(textureTransformMatrix, 0 /* offset */); } From 2e48a0bce0cfb1937f4793796b26fc2f08b1af22 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 22:14:51 -0700 Subject: [PATCH 066/106] Remove designated initializers PiperOrigin-RevId: 540471772 --- mediapipe/util/tflite/op_resolver.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mediapipe/util/tflite/op_resolver.cc b/mediapipe/util/tflite/op_resolver.cc index 44eff4566..dc8728334 100644 --- a/mediapipe/util/tflite/op_resolver.cc +++ b/mediapipe/util/tflite/op_resolver.cc @@ -58,7 +58,8 @@ TfLiteRegistration* RegisterMaxPoolingWithArgmax2D() { }); return r; }(); - static TfLiteRegistration reg = {.registration_external = reg_external}; + static TfLiteRegistration reg{}; + reg.registration_external = reg_external; return ® } @@ -68,7 +69,8 @@ TfLiteRegistration* RegisterMaxUnpooling2D() { TfLiteRegistrationExternalCreate(kTfLiteBuiltinCustom, kMaxUnpooling2DOpName, kMaxUnpooling2DOpVersion); - static TfLiteRegistration reg = {.registration_external = reg_external}; + static TfLiteRegistration reg{}; + reg.registration_external = reg_external; return ® } @@ -78,7 +80,8 @@ TfLiteRegistration* RegisterConvolution2DTransposeBias() { TfLiteRegistrationExternalCreate(kTfLiteBuiltinCustom, kConvolution2DTransposeBiasOpName, kConvolution2DTransposeBiasOpVersion); - static TfLiteRegistration reg = {.registration_external = reg_external}; + static TfLiteRegistration reg{}; + reg.registration_external = reg_external; return ® } From c8f85ac060f3cb1358fdd574e3378afdbd230441 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:06:52 +0530 Subject: [PATCH 067/106] Updated signature of initializer in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 1e70ef452..8cdf3af6f 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -93,8 +93,7 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height - error:(NSError **)error NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. 
From a7f555fcc2006059b459831e7828a42850a727a2 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:07:33 +0530 Subject: [PATCH 068/106] Fixed float calculations in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.mm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index cc6332676..87e967991 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -129,7 +129,7 @@ void copyData(const T *destination, const T *source, size_t length) { } float *data = allocateDataPtr(_allocatedFloat32Data, _width * _height); for (int i = 0; i < _width * _height; i++) { - data[i] = _uint8Data[i] / 255; + data[i] = (float)_uint8Data[i] / 255; } return data; } From aa1ab18000a987868143474c0645cb2d2ed99a6f Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:09:22 +0530 Subject: [PATCH 069/106] Updated documentation in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 8cdf3af6f..6aa5e3a5b 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -34,7 +34,7 @@ typedef NS_ENUM(NSUInteger, MPPMaskDataType) { * Every mask is has an underlying type which can be accessed using `dataType`. You can access the * mask as any other type using the appropriate properties. For eg:, if the underlying type is * `MPPMaskDataTypeUInt8`, in addition to accessing the mask using `uint8Array`, you can access - * 'floatArray` to get the float 32 data. The first time you access the data as a type different + * 'floatArray` to get the 32 bit float data. The first time you access the data as a type different * from the underlying type, an expensive type conversion is performed. Subsequent accesses return a * pointer to the memory location fo the same type converted array. As type conversions can be * expensive, it is recommended to limit the accesses to data of types different from the underlying From 9d0fed89ffcb20e6f9ec08a0633c4e36ef29a706 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:11:08 +0530 Subject: [PATCH 070/106] Fixed documentation in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 6aa5e3a5b..0df60d7d8 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -68,7 +68,7 @@ NS_SWIFT_NAME(Mask) @property(nonatomic, readonly, assign) const float *float32Data; /** - * Initializes an `MPPMask` object of tyep `MPPMaskDataTypeUInt8` with the given `UInt8*` data, + * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * * @param uint8Data A pointer to the memory location of the `UInt8` data array. @@ -82,7 +82,7 @@ NS_SWIFT_NAME(Mask) height:(NSInteger)height NS_DESIGNATED_INITIALIZER; /** - * Initializes an `MPPMask` object of tyep `MPPMaskDataTypeFloat32` with the given `float*` data, + * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. 
* * @param uint8Data A pointer to the memory location of the `float` data array. From 327547ec2b94bea9cbada21bff1b40eb6b1f40a1 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:16:34 +0530 Subject: [PATCH 071/106] Updated variable names in MPPMask --- .../tasks/ios/vision/core/sources/MPPMask.mm | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 87e967991..5aac59ec2 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -32,8 +32,8 @@ void copyData(const T *destination, const T *source, size_t length) { @interface MPPMask () { const UInt8 *_uint8Data; const float *_float32Data; - std::unique_ptr _allocatedUInt8Data; - std::unique_ptr _allocatedFloat32Data; + std::unique_ptr _uint8DataPtr; + std::unique_ptr _float32DataPtr; } @end @@ -84,7 +84,7 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = allocateDataPtr(_allocatedUInt8Data, _width * _height); + _uint8Data = allocateDataPtr(_uint8DataPtr, _width * _height); copyData(_uint8Data, uint8DataToCopy, _width * _height); } return self; @@ -95,7 +95,7 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = allocateDataPtr(_allocatedFloat32Data, _width * _height); + _float32Data = allocateDataPtr(_float32DataPtr, _width * _height); copyData(_float32Data, float32DataToCopy, _width * _height); } return self; @@ -107,10 +107,10 @@ void copyData(const T *destination, const T *source, size_t length) { return _uint8Data; } case MPPMaskDataTypeFloat32: { - if (_allocatedUInt8Data) { - return _allocatedUInt8Data.get(); + if (_uint8DataPtr) { + return _uint8DataPtr.get(); } - UInt8 *data = allocateDataPtr(_allocatedUInt8Data, _width * _height); + UInt8 *data = allocateDataPtr(_uint8DataPtr, _width * _height); for (int i = 0; i < _width * _height; i++) { data[i] = _float32Data[i] * 255; } @@ -124,13 +124,15 @@ void copyData(const T *destination, const T *source, size_t length) { - (const float *)float32Data { switch (_dataType) { case MPPMaskDataTypeUInt8: { - if (_allocatedFloat32Data) { - return _allocatedFloat32Data.get(); + if (_float32DataPtr) { + NSLog(@"Get repeated"); + return _float32DataPtr.get(); } - float *data = allocateDataPtr(_allocatedFloat32Data, _width * _height); + float *data = allocateDataPtr(_float32DataPtr, _width * _height); for (int i = 0; i < _width * _height; i++) { data[i] = (float)_uint8Data[i] / 255; } + NSLog(@"Get new"); return data; } case MPPMaskDataTypeFloat32: { From 1f77fa9de43a9d0f995f4e9981478291aaefb701 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 16:07:56 +0530 Subject: [PATCH 072/106] Removed generic methods for alloc and memcpy from MPPMask --- .../tasks/ios/vision/core/sources/MPPMask.h | 2 - .../tasks/ios/vision/core/sources/MPPMask.mm | 41 ++++++++----------- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 0df60d7d8..65af32d10 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ 
b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -95,10 +95,8 @@ NS_SWIFT_NAME(Mask) width:(NSInteger)width height:(NSInteger)height NS_DESIGNATED_INITIALIZER; - // TODO: Add methods for CVPixelBuffer conversion. - /** Unavailable. */ - (instancetype)init NS_UNAVAILABLE; diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 5aac59ec2..84a4eb4b5 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -16,19 +16,6 @@ #import "mediapipe/tasks/ios/common/sources/MPPCommon.h" #import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h" -namespace { -template -T *allocateDataPtr(std::unique_ptr &data, size_t length) { - data = std::unique_ptr(new T[length]); - return data.get(); -} - -template -void copyData(const T *destination, const T *source, size_t length) { - memcpy((void *)destination, source, length * sizeof(T)); -} -} // namespace - @interface MPPMask () { const UInt8 *_uint8Data; const float *_float32Data; @@ -84,8 +71,10 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = allocateDataPtr(_uint8DataPtr, _width * _height); - copyData(_uint8Data, uint8DataToCopy, _width * _height); + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); } return self; } @@ -95,8 +84,10 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = allocateDataPtr(_float32DataPtr, _width * _height); - copyData(_float32Data, float32DataToCopy, _width * _height); + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); } return self; } @@ -110,8 +101,11 @@ void copyData(const T *destination, const T *source, size_t length) { if (_uint8DataPtr) { return _uint8DataPtr.get(); } - UInt8 *data = allocateDataPtr(_uint8DataPtr, _width * _height); - for (int i = 0; i < _width * _height; i++) { + + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + UInt8 *data = _uint8DataPtr.get(); + for (int i = 0; i < length; i++) { data[i] = _float32Data[i] * 255; } return data; @@ -125,14 +119,15 @@ void copyData(const T *destination, const T *source, size_t length) { switch (_dataType) { case MPPMaskDataTypeUInt8: { if (_float32DataPtr) { - NSLog(@"Get repeated"); return _float32DataPtr.get(); } - float *data = allocateDataPtr(_float32DataPtr, _width * _height); - for (int i = 0; i < _width * _height; i++) { + + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + float *data = _float32DataPtr.get(); + for (int i = 0; i < length; i++) { data[i] = (float)_uint8Data[i] / 255; } - NSLog(@"Get new"); return data; } case MPPMaskDataTypeFloat32: { From e73ea2326179c1997e9f0e86c5711a21a0d7f79c Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 15 Jun 2023 09:23:15 -0700 Subject: [PATCH 073/106] Internal change PiperOrigin-RevId: 540603621 --- setup.py | 69 
++++++++++++++++++++++++-------------------- 1 file changed, 29 insertions(+), 40 deletions(-) diff --git a/setup.py b/setup.py index 4eaa0dcf2..d801cd986 100644 --- a/setup.py +++ b/setup.py @@ -20,6 +20,7 @@ import os import platform import posixpath import re +import shlex import shutil import subprocess import sys @@ -38,6 +39,15 @@ MP_DIR_INIT_PY = os.path.join(MP_ROOT_PATH, 'mediapipe/__init__.py') MP_THIRD_PARTY_BUILD = os.path.join(MP_ROOT_PATH, 'third_party/BUILD') MP_ROOT_INIT_PY = os.path.join(MP_ROOT_PATH, '__init__.py') +GPU_OPTIONS_DISABLED = ['--define=MEDIAPIPE_DISABLE_GPU=1'] +GPU_OPTIONS_ENABLED = [ + '--copt=-DTFLITE_GPU_EXTRA_GLES_DEPS', + '--copt=-DMEDIAPIPE_OMIT_EGL_WINDOW_BIT', + '--copt=-DMESA_EGL_NO_X11_HEADERS', + '--copt=-DEGL_NO_X11', +] +GPU_OPTIONS = GPU_OPTIONS_DISABLED if MP_DISABLE_GPU else GPU_OPTIONS_ENABLED + def _normalize_path(path): return path.replace('\\', '/') if IS_WINDOWS else path @@ -140,6 +150,16 @@ def _copy_to_build_lib_dir(build_lib, file): shutil.copyfile(os.path.join('bazel-bin/', file), dst) +def _invoke_shell_command(shell_commands): + """Invokes shell command from the list of arguments.""" + print('Invoking:', shlex.join(shell_commands)) + try: + subprocess.run(shell_commands, check=True) + except subprocess.CalledProcessError as e: + print(e) + sys.exit(e.returncode) + + class GeneratePyProtos(build_ext.build_ext): """Generate MediaPipe Python protobuf files by Protocol Compiler.""" @@ -204,9 +224,7 @@ class GeneratePyProtos(build_ext.build_ext): self._protoc, '-I.', '--python_out=' + os.path.abspath(self.build_lib), source ] - print('Invoking: ', protoc_command) - if subprocess.call(protoc_command) != 0: - sys.exit(-1) + _invoke_shell_command(protoc_command) class BuildModules(build_ext.build_ext): @@ -269,9 +287,7 @@ class BuildModules(build_ext.build_ext): 'build', external_file, ] - print('Invoking: ', fetch_model_command) - if subprocess.call(fetch_model_command) != 0: - sys.exit(-1) + _invoke_shell_command(fetch_model_command) _copy_to_build_lib_dir(self.build_lib, external_file) def _generate_binary_graph(self, binary_graph_target): @@ -284,20 +300,12 @@ class BuildModules(build_ext.build_ext): '--copt=-DNDEBUG', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), binary_graph_target, - ] - - if MP_DISABLE_GPU: - bazel_command.append('--define=MEDIAPIPE_DISABLE_GPU=1') - else: - bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') - bazel_command.append('--copt=-DEGL_NO_X11') + ] + GPU_OPTIONS if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') - print('Invoking: ', bazel_command) - if subprocess.call(bazel_command) != 0: - sys.exit(-1) + _invoke_shell_command(bazel_command) _copy_to_build_lib_dir(self.build_lib, binary_graph_target + '.binarypb') @@ -318,17 +326,9 @@ class GenerateMetadataSchema(build_ext.build_ext): '--compilation_mode=opt', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), '//mediapipe/tasks/metadata:' + target, - ] + ] + GPU_OPTIONS - if MP_DISABLE_GPU: - bazel_command.append('--define=MEDIAPIPE_DISABLE_GPU=1') - else: - bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') - bazel_command.append('--copt=-DEGL_NO_X11') - - print('Invoking: ', bazel_command) - if subprocess.call(bazel_command) != 0: - sys.exit(-1) + _invoke_shell_command(bazel_command) _copy_to_build_lib_dir( self.build_lib, 'mediapipe/tasks/metadata/' + target + '_generated.py') @@ -397,10 +397,7 @@ class BuildExtension(build_ext.build_ext): x86_name,
arm64_name, ] - - print('Invoking: ', lipo_command) - if subprocess.call(lipo_command) != 0: - sys.exit(-1) + _invoke_shell_command(lipo_command) else: for ext in self.extensions: self._build_binary(ext) @@ -416,22 +413,14 @@ class BuildExtension(build_ext.build_ext): '--copt=-DNDEBUG', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), str(ext.bazel_target + '.so'), - ] - - if MP_DISABLE_GPU: - bazel_command.append('--define=MEDIAPIPE_DISABLE_GPU=1') - else: - bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') - bazel_command.append('--copt=-DEGL_NO_X11') + ] + GPU_OPTIONS if extra_args: bazel_command += extra_args if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') - print('Invoking: ', bazel_command) - if subprocess.call(bazel_command) != 0: - sys.exit(-1) + _invoke_shell_command(bazel_command) ext_bazel_bin_path = os.path.join('bazel-bin', ext.relpath, ext.target_name + '.so') ext_dest_path = self.get_ext_fullpath(ext.name) From 6f065bc4054ed2102ed7794b4149598e909f42ff Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 15 Jun 2023 10:20:27 -0700 Subject: [PATCH 074/106] Update Tensorflow dependency in MediaPipe PiperOrigin-RevId: 540619536 --- WORKSPACE | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index 1d7ced979..25033fab0 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -485,9 +485,10 @@ http_archive( ) # TensorFlow repo should always go after the other external dependencies. -# TF on 2023-05-26. -_TENSORFLOW_GIT_COMMIT = "67d5c561981edc45daf3f9d73ddd1a77963733ca" -_TENSORFLOW_SHA256 = "0c8326285e9cb695313e194b97d388eea70bf8bf5b13e8f0962ca8eed5179ece" +# TF on 2023-06-13. +_TENSORFLOW_GIT_COMMIT = "491681a5620e41bf079a582ac39c585cc86878b9" +# curl -L https://github.com/tensorflow/tensorflow/archive/<_TENSORFLOW_GIT_COMMIT>.tar.gz | shasum -a 256 +_TENSORFLOW_SHA256 = "9f76389af7a2835e68413322c1eaabfadc912f02a76d71dc16be507f9ca3d3ac" http_archive( name = "org_tensorflow", urls = [ From 83486ed01b7e948b4d7dd0d8f356a3ae4970821c Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:56:04 +0530 Subject: [PATCH 075/106] Updated init method implementations in MPPMask --- mediapipe/framework/tool/ios.bzl | 2 +- .../tasks/ios/vision/core/sources/MPPMask.h | 17 +++++- .../tasks/ios/vision/core/sources/MPPMask.mm | 56 ++++++++----------- 3 files changed, 39 insertions(+), 36 deletions(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index c97b092e1..a0fe0be55 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "11.0" +MPP_TASK_MINIMUM_OS_VERSION = "12.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. This auxiliary rule diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 65af32d10..176e9b20d 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -71,21 +71,31 @@ NS_SWIFT_NAME(Mask) * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `uint8Data`.
Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `uint8Data`. + * * @param uint8Data A pointer to the memory location of the `UInt8` data array. * @param width The width of the mask. * @param height The height of the mask. + * @param shouldCopy Whether the newly created `MPPMask` should deep copy `uint8Data`. * * @return A new `MPPMask` instance with the given `UInt8*` data, width and height. */ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; /** * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. * - * @param uint8Data A pointer to the memory location of the `float` data array. + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `float32Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `float32Data`. + * + * @param float32Data A pointer to the memory location of the `float` data array. * @param width The width of the mask. * @param height The height of the mask. * @@ -93,7 +103,8 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 84a4eb4b5..3342218a6 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -48,46 +48,36 @@ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = uint8Data; + if (shouldCopy) { + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + } else { + _uint8Data = uint8Data; + } } return self; } - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOO)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = float32Data; - } - return self; -} - -- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; - if (self) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); - } - return self; -} - -- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; - if (self) { - size_t length = _width
* _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + if (shouldCopy) { + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + } else { + _float32Data = float32Data; + } } return self; } @@ -143,11 +133,13 @@ case MPPMaskDataTypeUInt8: return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data width:self.width - height:self.height]; + height:self.height + shouldCopy:YES]; case MPPMaskDataTypeFloat32: return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data width:self.width - height:self.height]; + height:self.height + shouldCopy:YES]; } } From 52f6b8d8993b8f1c79d8157a1933e2cfe7e96812 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:56:23 +0530 Subject: [PATCH 076/106] Revert "Updated init method implementations in MPPMask" This reverts commit 83486ed01b7e948b4d7dd0d8f356a3ae4970821c. --- mediapipe/framework/tool/ios.bzl | 2 +- .../tasks/ios/vision/core/sources/MPPMask.h | 17 +----- .../tasks/ios/vision/core/sources/MPPMask.mm | 56 +++++++++++-------- 3 files changed, 36 insertions(+), 39 deletions(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index a0fe0be55..c97b092e1 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "12.0" +MPP_TASK_MINIMUM_OS_VERSION = "11.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. This auxiliary rule diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 176e9b20d..65af32d10 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -71,31 +71,21 @@ NS_SWIFT_NAME(Mask) * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * - * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied - * `uint8Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless - * the `MPPMask` must outlive the passed in `uint8Data`. - * * @param uint8Data A pointer to the memory location of the `UInt8` data array. * @param width The width of the mask. * @param height The height of the mask. - * @param shouldCopy Whether the newly created `MPPMask` should deep copy `uint8Data`. * * @return A new `MPPMask` instance with the given `UInt8*` data, width and height. */ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height NS_DESIGNATED_INITIALIZER; /** * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. * - * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied - * `float32Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless - * the `MPPMask` must outlive the passed in `float32Data`. - * - * @param float32Data A pointer to the memory location of the `float` data array.
+ * @param uint8Data A pointer to the memory location of the `float` data array. * @param width The width of the mask. * @param height The height of the mask. * @@ -103,8 +93,7 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 3342218a6..84a4eb4b5 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -48,36 +48,46 @@ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOOL)shouldCopy { + height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - if (shouldCopy) { - size_t length = _width * _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); - } else { - _uint8Data = uint8Data; - } + _uint8Data = uint8Data; } return self; } - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOO)shouldCopy { + height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - if (shouldCopy) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); - } else { - _float32Data = float32Data; - } + _float32Data = float32Data; + } + return self; +} + +- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy + width:(NSInteger)width + height:(NSInteger)height { + self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; + if (self) { + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + } + return self; +} + +- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy + width:(NSInteger)width + height:(NSInteger)height { + self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; + if (self) { + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); } return self; } @@ -133,13 +143,11 @@ case MPPMaskDataTypeUInt8: return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data width:self.width - height:self.height - shouldCopy:YES]; + height:self.height]; case MPPMaskDataTypeFloat32: return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data width:self.width - height:self.height - shouldCopy:YES]; + height:self.height]; } } From fec2fc77e00f622f149519c49303b015deeff31e Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:56:32 +0530 Subject: [PATCH 077/106] Revert "Revert "Updated init method implementations in MPPMask"" This reverts commit 52f6b8d8993b8f1c79d8157a1933e2cfe7e96812. 
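Before the re-application that follows, it is worth spelling out the pattern the `uint8Data` and `float32Data` accessors in these MPPMask patches implement: convert lazily on first cross-type access, cache the converted buffer, and hand back the cached pointer on every later access. A rough Python analogue of that design (illustrative names and structure, not the actual MediaPipe API):

class LazyMask:
    """Sketch of lazy, cached dtype conversion."""

    def __init__(self, uint8_data):
        self._uint8_data = uint8_data  # the underlying type
        self._float32_cache = None     # plays the role of _float32DataPtr

    @property
    def float32_data(self):
        # The expensive conversion runs once; later accesses reuse the cache.
        if self._float32_cache is None:
            self._float32_cache = [b / 255.0 for b in self._uint8_data]
        return self._float32_cache

mask = LazyMask([0, 128, 255])
assert mask.float32_data is mask.float32_data  # second access hits the cache

This is also why the header documents cross-type accesses as potentially expensive: only the first one pays the conversion cost.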
--- mediapipe/framework/tool/ios.bzl | 2 +- .../tasks/ios/vision/core/sources/MPPMask.h | 17 +++++- .../tasks/ios/vision/core/sources/MPPMask.mm | 56 ++++++++----------- 3 files changed, 39 insertions(+), 36 deletions(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index c97b092e1..a0fe0be55 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "11.0" +MPP_TASK_MINIMUM_OS_VERSION = "12.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. This auxiliary rule diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 65af32d10..176e9b20d 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -71,21 +71,31 @@ NS_SWIFT_NAME(Mask) * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `uint8Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `uint8Data`. + * * @param uint8Data A pointer to the memory location of the `UInt8` data array. * @param width The width of the mask. * @param height The height of the mask. + * @param shouldCopy Whether the newly created `MPPMask` should deep copy `uint8Data`. * * @return A new `MPPMask` instance with the given `UInt8*` data, width and height. */ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; /** * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. * - * @param uint8Data A pointer to the memory location of the `float` data array. + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `float32Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `float32Data`. + * + * @param float32Data A pointer to the memory location of the `float` data array. * @param width The width of the mask. * @param height The height of the mask. * @@ -93,7 +103,8 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion.
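The `shouldCopy` parameter documented in the header above encodes an ownership decision: either the mask borrows the caller's buffer (cheap, but the buffer must outlive the mask) or it takes an independent deep copy. A hedged Python analogue of those semantics (names are illustrative, not the MediaPipe API):

class Mask:
    def __init__(self, data, width, height, should_copy=False):
        self.width = width
        self.height = height
        # bytes(...) creates an independent copy; otherwise keep the reference.
        self._data = bytes(data) if should_copy else data

buffer = bytearray([0, 128, 255, 255])
borrowed = Mask(buffer, width=2, height=2, should_copy=False)
owned = Mask(buffer, width=2, height=2, should_copy=True)

buffer[0] = 7
assert borrowed._data[0] == 7  # borrows the buffer, sees the mutation
assert owned._data[0] == 0     # owns its own copy, unaffected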
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 84a4eb4b5..3342218a6 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -48,46 +48,36 @@ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = uint8Data; + if (shouldCopy) { + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + } else { + _uint8Data = uint8Data; + } } return self; } - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOO)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = float32Data; - } - return self; -} - -- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; - if (self) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); - } - return self; -} - -- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; - if (self) { - size_t length = _width * _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + if (shouldCopy) { + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + } else { + _float32Data = float32Data; + } } return self; } @@ -143,11 +133,13 @@ case MPPMaskDataTypeUInt8: return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data width:self.width - height:self.height]; + height:self.height + shouldCopy:YES]; case MPPMaskDataTypeFloat32: return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data width:self.width - height:self.height]; + height:self.height + shouldCopy:YES]; } } From 4ab1a5de1b8d12bcb190ce6f91ea63dd623c3149 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:59:59 +0530 Subject: [PATCH 078/106] Reverted changes to iOS tasks deployment target --- mediapipe/framework/tool/ios.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index a0fe0be55..c97b092e1 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "12.0" +MPP_TASK_MINIMUM_OS_VERSION = "11.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. 
This auxiliary rule From d12dd88f518af3c7bc584e5044ffd55ceffdd8be Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 20:00:30 +0530 Subject: [PATCH 079/106] Fixed implementation of init methods in MPPMask --- .../tasks/ios/vision/core/sources/MPPMask.mm | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 3342218a6..b1a6ca218 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -54,9 +54,9 @@ if (self) { if (shouldCopy) { size_t length = _width * _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8Data, length * sizeof(UInt8)); } else { _uint8Data = uint8Data; } @@ -67,14 +67,14 @@ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width height:(NSInteger)height - shouldCopy:(BOO)shouldCopy { + shouldCopy:(BOOL)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { if (shouldCopy) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32Data, length * sizeof(float)); } else { _float32Data = float32Data; } @@ -131,12 +131,12 @@ - (id)copyWithZone:(NSZone *)zone { switch (_dataType) { case MPPMaskDataTypeUInt8: - return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data + return [[MPPMask alloc] initWithUInt8Data:self.uint8Data width:self.width height:self.height shouldCopy:YES]; case MPPMaskDataTypeFloat32: - return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data + return [[MPPMask alloc] initWithFloat32Data:self.float32Data width:self.width height:self.height shouldCopy:YES]; From c5b1edd709fedfac5ce86946a322866a89757d25 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 16 Jun 2023 13:08:25 -0700 Subject: [PATCH 080/106] Add "exports" field definitions to package.json Fixes https://github.com/google/mediapipe/issues/4547 PiperOrigin-RevId: 540977469 --- mediapipe/tasks/web/package.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mediapipe/tasks/web/package.json b/mediapipe/tasks/web/package.json index 4a42018f0..025ab46bd 100644 --- a/mediapipe/tasks/web/package.json +++ b/mediapipe/tasks/web/package.json @@ -5,6 +5,11 @@ "main": "__NAME___bundle.cjs", "browser": "__NAME___bundle.mjs", "module": "__NAME___bundle.mjs", + "exports": { + "import": "./__NAME___bundle.mjs", + "require": "./__NAME___bundle.cjs", + "default": "./__NAME___bundle.mjs" + }, "author": "mediapipe@google.com", "license": "Apache-2.0", "type": "module", From 80208079d260a7bc13a1d060fe0ec5d74ce89edf Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 16 Jun 2023 16:43:44 -0700 Subject: [PATCH 081/106] Use `GFile` for internal file systems. 
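The diff that follows replaces built-in file APIs with `tf.io.gfile`, which routes I/O through TensorFlow's filesystem layer so the same code works for local paths and for remote filesystems such as gs:// buckets. A short sketch of the resulting pattern (standard TensorFlow APIs; the function and its arguments are illustrative):

import os
import tensorflow as tf

def export_metadata(metadata_json: str, export_dir: str) -> str:
    metadata_file = os.path.join(export_dir, "metadata.json")
    # Creates intermediate directories and succeeds if they already exist,
    # for local and remote (e.g. gs://) paths alike.
    tf.io.gfile.makedirs(os.path.dirname(metadata_file))
    with tf.io.gfile.GFile(metadata_file, "w") as f:
        f.write(metadata_json)
    return metadata_file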
PiperOrigin-RevId: 541041972 --- .../python/text/text_classifier/text_classifier.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index c3dd48be8..a6762176b 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -161,9 +161,8 @@ class TextClassifier(classifier.Classifier): path is {self._hparams.export_dir}/{model_name}. quantization_config: The configuration for model quantization. """ - if not tf.io.gfile.exists(self._hparams.export_dir): - tf.io.gfile.makedirs(self._hparams.export_dir) tflite_file = os.path.join(self._hparams.export_dir, model_name) + tf.io.gfile.makedirs(os.path.dirname(tflite_file)) metadata_file = os.path.join(self._hparams.export_dir, "metadata.json") tflite_model = model_util.convert_to_tflite( @@ -174,7 +173,7 @@ class TextClassifier(classifier.Classifier): writer = self._get_metadata_writer(tflite_model, vocab_filepath) tflite_model_with_metadata, metadata_json = writer.populate() model_util.save_tflite(tflite_model_with_metadata, tflite_file) - with open(metadata_file, "w") as f: + with tf.io.gfile.GFile(metadata_file, "w") as f: f.write(metadata_json) @abc.abstractmethod From 35c79b755e50a502fc75769983f3d404ef1f957f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 16 Jun 2023 17:43:58 -0700 Subject: [PATCH 082/106] update face drawing function. PiperOrigin-RevId: 541055040 --- mediapipe/util/pose_util.cc | 63 ++++++++++++++++++++++++++++++------- mediapipe/util/pose_util.h | 1 + 2 files changed, 53 insertions(+), 11 deletions(-) diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 3a9c1e97b..79e3f791f 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -108,9 +108,23 @@ const int kFaceMeshFaceOval[36][2] = { {172, 58}, {58, 132}, {132, 93}, {93, 234}, {234, 127}, {127, 162}, {162, 21}, {21, 54}, {54, 103}, {103, 67}, {67, 109}, {109, 10}}; -const cv::Scalar kRightEyeColor = cv::Scalar(255.0, 48.0, 48.0); -const cv::Scalar kLeftEyeColor = cv::Scalar(48.0, 255.0, 48.0); -const cv::Scalar kFaceContourColor = cv::Scalar(224.0, 224.0, 224.0); +const int kFaceMeshNose[25][2] = { + {168, 6}, {6, 197}, {197, 195}, {195, 5}, {5, 4}, + {4, 1}, {1, 19}, {19, 94}, {94, 2}, {98, 97}, + {97, 2}, {2, 326}, {326, 327}, {327, 294}, {294, 278}, + {278, 344}, {344, 440}, {440, 275}, {275, 4}, {4, 45}, + {45, 220}, {220, 115}, {115, 48}, {48, 64}, {64, 98}}; + +const cv::Scalar kRedColor = cv::Scalar{255, 48, 48}; +const cv::Scalar kGreenColor = cv::Scalar{48, 255, 48}; +const cv::Scalar kBlueColor = cv::Scalar{21, 101, 192}; +const cv::Scalar kYellowColor = cv::Scalar{255, 204, 0}; +const cv::Scalar kGrayColor = cv::Scalar{128, 128, 128}; +const cv::Scalar kPurpleColor = cv::Scalar{128, 64, 128}; +const cv::Scalar kPeachColor = cv::Scalar{255, 229, 180}; +const cv::Scalar kWhiteColor = cv::Scalar(224, 224, 224); +const cv::Scalar kCyanColor = cv::Scalar{48, 255, 192}; +const cv::Scalar kMagentaColor = cv::Scalar{255, 48, 192}; } // namespace namespace mediapipe { @@ -172,6 +186,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, } void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, + bool draw_nose, bool color_style, int draw_line_width, cv::Mat* image) { const int target_width = image->cols; const int 
target_height = image->rows; @@ -181,17 +196,36 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, (flip_y ? 1.0f - lm.y() : lm.y()) * target_height); } - constexpr int draw_line_width = 2; + cv::Scalar kFaceOvalColor = kWhiteColor; + cv::Scalar kLipsColor = kWhiteColor; + cv::Scalar kLeftEyeColor = kGreenColor; + cv::Scalar kLeftEyebrowColor = kGreenColor; + cv::Scalar kLeftEyeIrisColor = kGreenColor; + cv::Scalar kRightEyeColor = kRedColor; + cv::Scalar kRightEyebrowColor = kRedColor; + cv::Scalar kRightEyeIrisColor = kRedColor; + cv::Scalar kNoseColor = kWhiteColor; + if (color_style) { + kFaceOvalColor = kWhiteColor; + kLipsColor = kBlueColor; + kLeftEyeColor = kCyanColor; + kLeftEyebrowColor = kGreenColor; + kLeftEyeIrisColor = kGreenColor; + kRightEyeColor = kMagentaColor; + kRightEyebrowColor = kRedColor; + kRightEyeIrisColor = kRedColor; + kNoseColor = kYellowColor; + } + for (int j = 0; j < 36; ++j) { cv::line(*image, landmarks[kFaceMeshFaceOval[j][0]], - landmarks[kFaceMeshFaceOval[j][1]], kFaceContourColor, + landmarks[kFaceMeshFaceOval[j][1]], kFaceOvalColor, draw_line_width); } for (int j = 0; j < 40; ++j) { cv::line(*image, landmarks[kFaceMeshLips[j][0]], - landmarks[kFaceMeshLips[j][1]], kFaceContourColor, - draw_line_width); + landmarks[kFaceMeshLips[j][1]], kLipsColor, draw_line_width); } for (int j = 0; j < 16; ++j) { @@ -201,13 +235,13 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, for (int j = 0; j < 8; ++j) { cv::line(*image, landmarks[kFaceMeshLeftEyebrow[j][0]], - landmarks[kFaceMeshLeftEyebrow[j][1]], kLeftEyeColor, + landmarks[kFaceMeshLeftEyebrow[j][1]], kLeftEyebrowColor, draw_line_width); } for (int j = 0; j < 4; ++j) { cv::line(*image, landmarks[kFaceMeshLeftIris[j][0]], - landmarks[kFaceMeshLeftIris[j][1]], kLeftEyeColor, + landmarks[kFaceMeshLeftIris[j][1]], kLeftEyeIrisColor, draw_line_width); } @@ -219,14 +253,21 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, for (int j = 0; j < 8; ++j) { cv::line(*image, landmarks[kFaceMeshRightEyebrow[j][0]], - landmarks[kFaceMeshRightEyebrow[j][1]], kRightEyeColor, + landmarks[kFaceMeshRightEyebrow[j][1]], kRightEyebrowColor, draw_line_width); } for (int j = 0; j < 4; ++j) { cv::line(*image, landmarks[kFaceMeshRightIris[j][0]], - landmarks[kFaceMeshRightIris[j][1]], kRightEyeColor, + landmarks[kFaceMeshRightIris[j][1]], kRightEyeIrisColor, draw_line_width); } + + if (draw_nose) { + for (int j = 0; j < 25; ++j) { + cv::line(*image, landmarks[kFaceMeshNose[j][0]], + landmarks[kFaceMeshNose[j][1]], kNoseColor, draw_line_width); + } + } } } // namespace mediapipe diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h index ed271e2ea..b4e517187 100644 --- a/mediapipe/util/pose_util.h +++ b/mediapipe/util/pose_util.h @@ -24,6 +24,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, cv::Mat* image); void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, + bool draw_nose, bool color_style, int draw_line_width, cv::Mat* image); } // namespace mediapipe From 86bc764b6e1510387fd4976d24e8ecc1baff73a0 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 20 Jun 2023 09:14:39 -0700 Subject: [PATCH 083/106] This will fix typos in tasks internal files. 
PiperOrigin-RevId: 541945726 --- .../face_detector/MPPFaceDetectorTests.mm | 18 +++++++++--------- .../face_landmarker/MPPFaceLandmarkerTests.mm | 8 ++++---- .../MPPGestureRecognizerTests.m | 4 ++-- .../image_classifier/MPPImageClassifierTests.m | 4 ++-- .../vision/core/sources/MPPVisionTaskRunner.mm | 2 +- .../sources/MPPFaceLandmarkerResult+Helpers.h | 2 +- .../vision/image_segmenter/image_segmenter.ts | 2 +- .../interactive_segmenter.ts | 4 ++-- .../vision/pose_landmarker/pose_landmarker.ts | 8 ++++---- 9 files changed, 26 insertions(+), 26 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm index ea0664409..1976bf603 100644 --- a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm +++ b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm @@ -155,12 +155,12 @@ static const float kKeypointErrorThreshold = 1e-2; NSInteger iterationCount = 100; // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. A - // normal expectation will fail if expectation.fullfill() is not called + // normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will - // only succeed if expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since it is not possible to predict how many times the expectation is supposed to be - // fullfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds if expectation is fullfilled <= + // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times. XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; @@ -385,13 +385,13 @@ static const float kKeypointErrorThreshold = 1e-2; NSInteger iterationCount = 100; // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. A - // normal expectation will fail if expectation.fullfill() is not called times. An normal - // expectation will fail if expectation.fullfill() is not called + // normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will - // only succeed if expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since it is not possible to determine how many times the expectation is supposed to be - // fullfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds if expectation is fullfilled <= + // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times.
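The comment block above compresses a subtle point: with flow limiting, the live-stream callback fires at least once but at most `iterationCount` times, so the only bound the test can assert automatically is the upper one, and an inverted expectation with `expectedFulfillmentCount = iterationCount + 1` fails exactly when that bound is exceeded. The same logic in plain Python (a sketch of the arithmetic, not XCTest):

ITERATION_COUNT = 100

def flow_limited_callback_count_ok(invocations: int) -> bool:
    # The inverted expectation trips only on the (iterationCount + 1)-th
    # fulfillment, i.e. it enforces exactly this upper bound.
    return 0 < invocations <= ITERATION_COUNT

assert flow_limited_callback_count_ok(1)
assert flow_limited_callback_count_ok(100)
assert not flow_limited_callback_count_ok(101)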
XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; diff --git a/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm b/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm index f1d6033a8..3ebc89466 100644 --- a/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm +++ b/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm @@ -174,12 +174,12 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f; NSInteger iterationCount = 100; // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. A - // normal expectation will fail if expectation.fullfill() is not called + // normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will - // only succeed if expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since it is not possible to predict how many times the expectation is supposed to be - // fullfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds if expectation is fullfilled <= + // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times. XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 6bbcf9b10..8fbcb6b49 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -654,9 +654,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; // times. A normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. - // Since in our case we cannot predict how many times the expectation is supposed to be fullfilled + // Since in our case we cannot predict how many times the expectation is supposed to be fulfilled, // setting `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds ifexpectation is fullfilled <= + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times.
XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"recognizeWithLiveStream"]; diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index 59383dad6..c08976923 100644 --- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -673,10 +673,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; // If `expectation.isInverted = true`, the test will only succeed if // expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since in our case we cannot predict how many times the expectation is - // supposed to be fullfilled setting, + // supposed to be fulfilled setting, // `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and // `expectation.isInverted = true` ensures that test succeeds if - // expectation is fullfilled <= `iterationCount` times. + // expectation is fulfilled <= `iterationCount` times. XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"classifyWithLiveStream"]; diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm index c1b5d0587..cba8a63ff 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm @@ -165,7 +165,7 @@ static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; // For 90° and 270° rotations, we need to swap width and height. // This is due to the internal behavior of ImageToTensorCalculator, which: // - first denormalizes the provided rect by multiplying the rect width or height by the image - // width or height, repectively. + // width or height, respectively. // - then rotates this by denormalized rect by the provided rotation, and uses this for cropping, // - then finally rotates this back. if (rotationDegrees % 180 == 0) { diff --git a/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h index 422e1bf07..b27bd2676 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h @@ -32,7 +32,7 @@ NS_ASSUME_NONNULL_BEGIN * @param transformationMatrixesPacket a MediaPipe packet wrapping a * `std::vector`. * - * @return An `MPPFaceLandmarkerResult` object that contains the contenst of the provided packets. + * @return An `MPPFaceLandmarkerResult` object that contains the contents of the provided packets. */ + (MPPFaceLandmarkerResult *) faceLandmarkerResultWithLandmarksPacket:(const ::mediapipe::Packet &)landmarksPacket diff --git a/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts b/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts index 6d295aaa8..2f35f6676 100644 --- a/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts +++ b/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts @@ -308,7 +308,7 @@ export class ImageSegmenter extends VisionTaskRunner { /** * Performs image segmentation on the provided video frame and returns the * segmentation result. 
This method creates a copy of the resulting masks and - * should not be used in high-throughput applictions. Only use this method + * should not be used in high-throughput applications. Only use this method * when the ImageSegmenter is created with running mode `video`. * * @param videoFrame A video frame to process. diff --git a/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts b/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts index 662eaf09a..acd7265c2 100644 --- a/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts +++ b/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts @@ -230,7 +230,7 @@ export class InteractiveSegmenter extends VisionTaskRunner { /** * Performs interactive segmentation on the provided video frame and returns * the segmentation result. This method creates a copy of the resulting masks - * and should not be used in high-throughput applictions. The `roi` parameter + * and should not be used in high-throughput applications. The `roi` parameter * is used to represent a user's region of interest for segmentation. * * @param image An image to process. @@ -243,7 +243,7 @@ export class InteractiveSegmenter extends VisionTaskRunner { /** * Performs interactive segmentation on the provided video frame and returns * the segmentation result. This method creates a copy of the resulting masks - * and should not be used in high-throughput applictions. The `roi` parameter + * and should not be used in high-throughput applications. The `roi` parameter * is used to represent a user's region of interest for segmentation. * * The 'image_processing_options' parameter can be used to specify the diff --git a/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts b/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts index 927b3c24b..d2cb9234d 100644 --- a/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts +++ b/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts @@ -233,7 +233,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided single image and waits * synchronously for the response. This method creates a copy of the resulting - * masks and should not be used in high-throughput applictions. Only + * masks and should not be used in high-throughput applications. Only * use this method when the PoseLandmarker is created with running mode * `image`. * @@ -246,7 +246,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided single image and waits * synchronously for the response. This method creates a copy of the resulting - * masks and should not be used in high-throughput applictions. Only + * masks and should not be used in high-throughput applications. Only * use this method when the PoseLandmarker is created with running mode * `image`. * @@ -311,7 +311,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided video frame and returns the result. * This method creates a copy of the resulting masks and should not be used - * in high-throughput applictions. Only use this method when the + * in high-throughput applications. Only use this method when the * PoseLandmarker is created with running mode `video`. * * @param videoFrame A video frame to process. @@ -324,7 +324,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided video frame and returns the result. 
* This method creates a copy of the resulting masks and should not be used - * in high-throughput applictions. The method returns synchronously once the + * in high-throughput applications. The method returns synchronously once the * callback returns. Only use this method when the PoseLandmarker is created * with running mode `video`. * From ef6aeb88285152b88287c3a4db44a71d9ce77eb8 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 20 Jun 2023 14:36:55 -0700 Subject: [PATCH 084/106] Allow passing of HParams to MediaPipe training docker PiperOrigin-RevId: 542052304 --- mediapipe/model_maker/python/core/BUILD | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mediapipe/model_maker/python/core/BUILD b/mediapipe/model_maker/python/core/BUILD index 0ed20a2fe..a73e545d3 100644 --- a/mediapipe/model_maker/python/core/BUILD +++ b/mediapipe/model_maker/python/core/BUILD @@ -14,7 +14,10 @@ # Placeholder for internal Python strict library and test compatibility macro. -package(default_visibility = ["//mediapipe:__subpackages__"]) +package(default_visibility = [ + "//cloud/ml/applications/vision/model_garden/model_oss/mediapipe:__subpackages__", + "//mediapipe:__subpackages__", +]) licenses(["notice"]) From 0b6ff84e3c9e211d0d9f664dff3f7f020037ff15 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 20 Jun 2023 16:31:48 -0700 Subject: [PATCH 085/106] update face drawing function. PiperOrigin-RevId: 542083042 --- mediapipe/util/pose_util.cc | 22 ++++++++++++++++++++-- mediapipe/util/pose_util.h | 4 ++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 79e3f791f..61663ba55 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -125,6 +125,12 @@ const cv::Scalar kPeachColor = cv::Scalar{255, 229, 180}; const cv::Scalar kWhiteColor = cv::Scalar(224, 224, 224); const cv::Scalar kCyanColor = cv::Scalar{48, 255, 192}; const cv::Scalar kMagentaColor = cv::Scalar{255, 48, 192}; + +void ReverseRGB(cv::Scalar* color) { + int tmp = color->val[0]; + color->val[0] = color->val[2]; + color->val[2] = tmp; +} } // namespace namespace mediapipe { @@ -186,8 +192,8 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, } void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, - bool draw_nose, bool color_style, int draw_line_width, - cv::Mat* image) { + bool draw_nose, bool color_style, bool reverse_color, + int draw_line_width, cv::Mat* image) { const int target_width = image->cols; const int target_height = image->rows; std::vector landmarks; @@ -217,6 +223,18 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, kNoseColor = kYellowColor; } + if (reverse_color) { + ReverseRGB(&kFaceOvalColor); + ReverseRGB(&kLipsColor); + ReverseRGB(&kLeftEyeColor); + ReverseRGB(&kLeftEyebrowColor); + ReverseRGB(&kLeftEyeIrisColor); + ReverseRGB(&kRightEyeColor); + ReverseRGB(&kRightEyebrowColor); + ReverseRGB(&kRightEyeIrisColor); + ReverseRGB(&kNoseColor); + } + for (int j = 0; j < 36; ++j) { cv::line(*image, landmarks[kFaceMeshFaceOval[j][0]], landmarks[kFaceMeshFaceOval[j][1]], kFaceOvalColor, diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h index b4e517187..d94e22cbe 100644 --- a/mediapipe/util/pose_util.h +++ b/mediapipe/util/pose_util.h @@ -24,8 +24,8 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, cv::Mat* image); void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, 
- bool draw_nose, bool color_style, int draw_line_width, - cv::Mat* image); + bool draw_nose, bool color_style, bool reverse_color, + int draw_line_width, cv::Mat* image); } // namespace mediapipe From 7edb6b8fcb5a99a0bb99eaa0fc790e2020c4b82e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 20 Jun 2023 16:37:13 -0700 Subject: [PATCH 086/106] add concatenate image vector calculator PiperOrigin-RevId: 542084345 --- mediapipe/calculators/core/BUILD | 1 + mediapipe/calculators/core/concatenate_vector_calculator.cc | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD index d3e63e38f..a425b7e38 100644 --- a/mediapipe/calculators/core/BUILD +++ b/mediapipe/calculators/core/BUILD @@ -289,6 +289,7 @@ cc_library( "//mediapipe/framework/api2:node", "//mediapipe/framework/api2:port", "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:image", "//mediapipe/framework/formats:landmark_cc_proto", "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:integral_types", diff --git a/mediapipe/calculators/core/concatenate_vector_calculator.cc b/mediapipe/calculators/core/concatenate_vector_calculator.cc index 4d0d66206..53b3debf1 100644 --- a/mediapipe/calculators/core/concatenate_vector_calculator.cc +++ b/mediapipe/calculators/core/concatenate_vector_calculator.cc @@ -17,6 +17,7 @@ #include #include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/landmark.pb.h" #include "mediapipe/framework/formats/tensor.h" #include "mediapipe/framework/port/integral_types.h" @@ -104,4 +105,7 @@ typedef ConcatenateVectorCalculator<mediapipe::RenderData> ConcatenateRenderDataVectorCalculator; MEDIAPIPE_REGISTER_NODE(ConcatenateRenderDataVectorCalculator); +typedef ConcatenateVectorCalculator<mediapipe::Image> + ConcatenateImageVectorCalculator; +MEDIAPIPE_REGISTER_NODE(ConcatenateImageVectorCalculator); } // namespace mediapipe From 895c685df6ee4eeb9fce5ccdb32c1dbeab2334a6 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 15:15:30 -0700 Subject: [PATCH 087/106] 1. Model maker core classifier change _metric_function field to _metric_functions in order to support having multiple metrics. 2. Add SparsePrecision, SparseRecall, BinarySparsePrecisionAtRecall, and BinarySparseRecallAtPrecision to the shared metrics library. 3.
Add SparsePrecision, SparseRecall to text classifier, and have the option to evaluate the model with BinarySparsePrecisionAtRecall and BinarySparseRecallAtPrecision PiperOrigin-RevId: 542376451 --- .../python/core/tasks/classifier.py | 5 +- mediapipe/model_maker/python/core/utils/BUILD | 11 ++ .../model_maker/python/core/utils/metrics.py | 104 ++++++++++++++++++ .../python/core/utils/metrics_test.py | 74 +++++++++++++ .../python/text/text_classifier/BUILD | 1 + .../text/text_classifier/text_classifier.py | 53 ++++++++- .../gesture_recognizer/gesture_recognizer.py | 2 +- .../image_classifier/image_classifier.py | 2 +- 8 files changed, 243 insertions(+), 9 deletions(-) create mode 100644 mediapipe/model_maker/python/core/utils/metrics.py create mode 100644 mediapipe/model_maker/python/core/utils/metrics_test.py diff --git a/mediapipe/model_maker/python/core/tasks/classifier.py b/mediapipe/model_maker/python/core/tasks/classifier.py index 60c00f0de..a042c0ec7 100644 --- a/mediapipe/model_maker/python/core/tasks/classifier.py +++ b/mediapipe/model_maker/python/core/tasks/classifier.py @@ -43,7 +43,7 @@ class Classifier(custom_model.CustomModel): self._model: tf.keras.Model = None self._optimizer: Union[str, tf.keras.optimizers.Optimizer] = None self._loss_function: Union[str, tf.keras.losses.Loss] = None - self._metric_function: Union[str, tf.keras.metrics.Metric] = None + self._metric_functions: Sequence[Union[str, tf.keras.metrics.Metric]] = None self._callbacks: Sequence[tf.keras.callbacks.Callback] = None self._hparams: hp.BaseHParams = None self._history: tf.keras.callbacks.History = None @@ -92,7 +92,8 @@ class Classifier(custom_model.CustomModel): self._model.compile( optimizer=self._optimizer, loss=self._loss_function, - metrics=[self._metric_function]) + metrics=self._metric_functions, + ) latest_checkpoint = ( tf.train.latest_checkpoint(checkpoint_path) diff --git a/mediapipe/model_maker/python/core/utils/BUILD b/mediapipe/model_maker/python/core/utils/BUILD index ef9cab290..81bd68d3e 100644 --- a/mediapipe/model_maker/python/core/utils/BUILD +++ b/mediapipe/model_maker/python/core/utils/BUILD @@ -80,6 +80,17 @@ py_test( deps = [":loss_functions"], ) +py_library( + name = "metrics", + srcs = ["metrics.py"], +) + +py_test( + name = "metrics_test", + srcs = ["metrics_test.py"], + deps = [":metrics"], +) + py_library( name = "quantization", srcs = ["quantization.py"], diff --git a/mediapipe/model_maker/python/core/utils/metrics.py b/mediapipe/model_maker/python/core/utils/metrics.py new file mode 100644 index 000000000..310146168 --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/metrics.py @@ -0,0 +1,104 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Metrics utility library.""" + +import tensorflow as tf + + +def _get_binary_sparse_metric(metric: tf.metrics.Metric): + """Helper method to create a BinarySparse version of a tf.keras.Metric. 
+ + BinarySparse is an implementation where the update_state(y_true, y_pred) takes + in shapes y_true=(batch_size, 1) y_pred=(batch_size, 2). Note that this only + supports the binary classification case, and that class_id=0 is the negative + class and class_id=1 is the positive class. + + Currently supported tf.metric.Metric classes + 1. BinarySparseRecallAtPrecision + 2. BinarySparsePrecisionAtRecall + + Args: + metric: A tf.metric.Metric class for which we want to generate a + BinarySparse version of this metric. + + Returns: + A class for the BinarySparse version of the specified tf.metrics.Metric + """ + + class BinarySparseMetric(metric): + """A BinarySparse wrapper class for a tf.keras.Metric. + + This class has the same parameters and functions as the underlying + metric class. For example, the parameters for BinarySparseRecallAtPrecision + is the same as tf.keras.metrics.RecallAtPrecision. The only new constraint + is that class_id must be set to 1 (or not specified) for the Binary metric. + """ + + def __init__(self, *args, **kwargs): + if 'class_id' in kwargs and kwargs['class_id'] != 1: + raise ValueError( + f'Custom BinarySparseMetric for class:{metric.__name__} is ' + 'only supported for class_id=1, got class_id=' + f'{kwargs["class_id"]} instead' + ) + else: + kwargs['class_id'] = 1 + super().__init__(*args, **kwargs) + + def update_state(self, y_true, y_pred, sample_weight=None): + y_true = tf.cast(tf.reshape(y_true, [-1]), tf.int32) + y_true_one_hot = tf.one_hot(y_true, 2) + return super().update_state( + y_true_one_hot, y_pred, sample_weight=sample_weight + ) + + return BinarySparseMetric + + +def _get_sparse_metric(metric: tf.metrics.Metric): + """Helper method to create a Sparse version of a tf.keras.Metric. + + Sparse is an implementation where the update_state(y_true, y_pred) takes in + shapes y_true=(batch_size, 1) and y_pred=(batch_size, num_classes). + + Currently supported tf.metrics.Metric classes: + 1. tf.metrics.Recall + 2. tf.metrics.Precision + + Args: + metric: A tf.metric.Metric class for which we want to generate a Sparse + version of this metric. + + Returns: + A class for the Sparse version of the specified tf.keras.Metric. + """ + + class SparseMetric(metric): + """A Sparse wrapper class for a tf.keras.Metric.""" + + def update_state(self, y_true, y_pred, sample_weight=None): + y_pred = tf.math.argmax(y_pred, axis=-1) + return super().update_state(y_true, y_pred, sample_weight=sample_weight) + + return SparseMetric + + +SparseRecall = _get_sparse_metric(tf.metrics.Recall) +SparsePrecision = _get_sparse_metric(tf.metrics.Precision) +BinarySparseRecallAtPrecision = _get_binary_sparse_metric( + tf.metrics.RecallAtPrecision +) +BinarySparsePrecisionAtRecall = _get_binary_sparse_metric( + tf.metrics.PrecisionAtRecall +) diff --git a/mediapipe/model_maker/python/core/utils/metrics_test.py b/mediapipe/model_maker/python/core/utils/metrics_test.py new file mode 100644 index 000000000..842335273 --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/metrics_test.py @@ -0,0 +1,74 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from absl.testing import parameterized +import tensorflow as tf + +from mediapipe.model_maker.python.core.utils import metrics + + +class SparseMetricTest(tf.test.TestCase, parameterized.TestCase): + + def setUp(self): + super().setUp() + self.y_true = [0, 0, 1, 1, 0, 1] + self.y_pred = [ + [0.9, 0.1], # 0, 0 y + [0.8, 0.2], # 0, 0 y + [0.7, 0.3], # 0, 1 n + [0.6, 0.4], # 0, 1 n + [0.3, 0.7], # 1, 0 y + [0.3, 0.7], # 1, 1 y + ] + self.num_classes = 3 + + def _assert_metric_equals(self, metric, value): + metric.update_state(self.y_true, self.y_pred) + self.assertEqual(metric.result(), value) + + def test_sparse_recall(self): + metric = metrics.SparseRecall() + self._assert_metric_equals(metric, 1 / 3) + + def test_sparse_precision(self): + metric = metrics.SparsePrecision() + self._assert_metric_equals(metric, 1 / 2) + + def test_binary_sparse_recall_at_precision(self): + metric = metrics.BinarySparseRecallAtPrecision(1.0) + self._assert_metric_equals(metric, 0.0) # impossible to achieve precision=1 + metric = metrics.BinarySparseRecallAtPrecision(0.4) + self._assert_metric_equals(metric, 1.0) + + def test_binary_sparse_precision_at_recall(self): + metric = metrics.BinarySparsePrecisionAtRecall(1.0) + self._assert_metric_equals(metric, 3 / 4) + metric = metrics.BinarySparsePrecisionAtRecall(0.7) + self._assert_metric_equals(metric, 3 / 4) + + def test_binary_sparse_precision_at_recall_class_id_error(self): + # class_id=1 case should not error + _ = metrics.BinarySparsePrecisionAtRecall(1.0, class_id=1) + # class_id=2 case should error + with self.assertRaisesRegex( + ValueError, + 'Custom BinarySparseMetric for class:PrecisionAtRecall is only' + ' supported for class_id=1, got class_id=2 instead', + ): + _ = metrics.BinarySparsePrecisionAtRecall(1.0, class_id=2) + + +if __name__ == '__main__': + tf.test.main() diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 9fe96849b..26412d2cb 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -118,6 +118,7 @@ py_library( "//mediapipe/model_maker/python/core:hyperparameters", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", + "//mediapipe/model_maker/python/core/utils:metrics", "//mediapipe/model_maker/python/core/utils:model_util", "//mediapipe/model_maker/python/core/utils:quantization", "//mediapipe/tasks/python/metadata/metadata_writers:metadata_writer", diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index a6762176b..59369931d 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -24,6 +24,7 @@ import tensorflow_hub as hub from mediapipe.model_maker.python.core import hyperparameters as hp from mediapipe.model_maker.python.core.data import dataset as ds from mediapipe.model_maker.python.core.tasks import 
classifier +from mediapipe.model_maker.python.core.utils import metrics from mediapipe.model_maker.python.core.utils import model_util from mediapipe.model_maker.python.core.utils import quantization from mediapipe.model_maker.python.text.text_classifier import dataset as text_ds @@ -123,12 +124,24 @@ class TextClassifier(classifier.Classifier): return text_classifier - def evaluate(self, data: ds.Dataset, batch_size: int = 32) -> Any: + def evaluate( + self, + data: ds.Dataset, + batch_size: int = 32, + desired_precisions: Optional[Sequence[float]] = None, + desired_recalls: Optional[Sequence[float]] = None, + ) -> Any: """Overrides Classifier.evaluate(). Args: data: Evaluation dataset. Must be a TextClassifier Dataset. batch_size: Number of samples per evaluation step. + desired_precisions: If specified, adds a RecallAtPrecision metric per + desired_precisions[i] entry which tracks the recall given the constraint + on precision. Only supported for binary classification. + desired_recalls: If specified, adds a PrecisionAtRecall metric per + desired_recalls[i] entry which tracks the precision given the constraint + on recall. Only supported for binary classification. Returns: The loss value and accuracy. @@ -144,6 +157,28 @@ class TextClassifier(classifier.Classifier): processed_data = self._text_preprocessor.preprocess(data) dataset = processed_data.gen_tf_dataset(batch_size, is_training=False) + + additional_metrics = [] + if desired_precisions and len(data.label_names) == 2: + for precision in desired_precisions: + additional_metrics.append( + metrics.BinarySparseRecallAtPrecision( + precision, name=f"recall_at_precision_{precision}" + ) + ) + if desired_recalls and len(data.label_names) == 2: + for recall in desired_recalls: + additional_metrics.append( + metrics.BinarySparsePrecisionAtRecall( + recall, name=f"precision_at_recall_{recall}" + ) + ) + metric_functions = self._metric_functions + additional_metrics + self._model.compile( + optimizer=self._optimizer, + loss=self._loss_function, + metrics=metric_functions, + ) return self._model.evaluate(dataset) def export_model( @@ -196,7 +231,11 @@ class _AverageWordEmbeddingClassifier(TextClassifier): super().__init__(model_spec, hparams, label_names) self._model_options = model_options self._loss_function = "sparse_categorical_crossentropy" - self._metric_function = "accuracy" + self._metric_functions = [ + "accuracy", + metrics.SparsePrecision(name="precision", dtype=tf.float32), + metrics.SparseRecall(name="recall", dtype=tf.float32), + ] self._text_preprocessor: ( preprocessor.AverageWordEmbeddingClassifierPreprocessor) = None @@ -312,9 +351,13 @@ class _BertClassifier(TextClassifier): self._model_options = model_options with self._hparams.get_strategy().scope(): self._loss_function = tf.keras.losses.SparseCategoricalCrossentropy() - self._metric_function = tf.keras.metrics.SparseCategoricalAccuracy( - "test_accuracy", dtype=tf.float32 - ) + self._metric_functions = [ + tf.keras.metrics.SparseCategoricalAccuracy( + "test_accuracy", dtype=tf.float32 + ), + metrics.SparsePrecision(name="precision", dtype=tf.float32), + metrics.SparseRecall(name="recall", dtype=tf.float32), + ] self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None @classmethod diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py b/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py index 66934304a..8335968b7 100644 --- 
a/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py @@ -54,7 +54,7 @@ class GestureRecognizer(classifier.Classifier): self._model_options = model_options self._hparams = hparams self._loss_function = loss_functions.FocalLoss(gamma=self._hparams.gamma) - self._metric_function = 'categorical_accuracy' + self._metric_functions = ['categorical_accuracy'] self._optimizer = 'adam' self._callbacks = self._get_callbacks() self._history = None diff --git a/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py b/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py index 3838a5a1a..8acf59f66 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py +++ b/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py @@ -59,7 +59,7 @@ class ImageClassifier(classifier.Classifier): self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) self._loss_function = tf.keras.losses.CategoricalCrossentropy( label_smoothing=self._hparams.label_smoothing) - self._metric_function = 'accuracy' + self._metric_functions = ['accuracy'] self._history = None # Training history returned from `keras_model.fit`. @classmethod From c86d80a03180389aa4325ceb7ca7308e7a7d7359 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 16:00:47 -0700 Subject: [PATCH 088/106] Internal Changes PiperOrigin-RevId: 542387813 --- mediapipe/model_maker/python/core/utils/BUILD | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/mediapipe/model_maker/python/core/utils/BUILD b/mediapipe/model_maker/python/core/utils/BUILD index 81bd68d3e..2c29970bb 100644 --- a/mediapipe/model_maker/python/core/utils/BUILD +++ b/mediapipe/model_maker/python/core/utils/BUILD @@ -80,6 +80,14 @@ py_test( deps = [":loss_functions"], ) +###################################################################### +# Public target of the MediaPipe Model Maker Quantization Config. + +# Quantization Config is used to export a quantized model. Please refer +# to the specific task documentations such as: +# https://developers.google.com/mediapipe/solutions/vision/image_classifier/customize +# for usage information. 
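+#
+# (This banner documents the public "quantization" target declared further
+# below in this file; the "metrics" target that immediately follows it
+# remains internal-only.)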
+###################################################################### py_library( name = "metrics", srcs = ["metrics.py"], @@ -95,6 +103,7 @@ py_library( name = "quantization", srcs = ["quantization.py"], srcs_version = "PY3", + visibility = ["//visibility:public"], deps = ["//mediapipe/model_maker/python/core/data:dataset"], ) From 0d2548cd6533036d780c447d2f2631177095c0c2 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 16:21:15 -0700 Subject: [PATCH 089/106] Internal change PiperOrigin-RevId: 542392817 --- mediapipe/java/com/google/mediapipe/framework/BUILD | 1 - 1 file changed, 1 deletion(-) diff --git a/mediapipe/java/com/google/mediapipe/framework/BUILD b/mediapipe/java/com/google/mediapipe/framework/BUILD index dd5f8f1da..78ae61d06 100644 --- a/mediapipe/java/com/google/mediapipe/framework/BUILD +++ b/mediapipe/java/com/google/mediapipe/framework/BUILD @@ -50,7 +50,6 @@ android_library( "MediaPipeRunner.java", ], visibility = [ - "//java/com/google/android/libraries/camera/effects:__subpackages__", "//mediapipe/java/com/google/mediapipe:__subpackages__", ], exports = [ From 825e3a8af0d9a40e9e298c172b95e15d1779da54 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 23:01:44 -0700 Subject: [PATCH 090/106] Speed up TimeSeriesFramerCalculator. Currently, TimeSeriesFramerCalculator constructs a distinct Matrix object for every input sample, which is inefficient. This CL revises buffering to keep each input packet's worth of samples as one grouped Matrix. A benchmark is added, showing a speed up of about 20x. ``` name old new BM_TimeSeriesFramerCalculator 48.45ms 2.26ms ``` PiperOrigin-RevId: 542462618 --- mediapipe/calculators/audio/BUILD | 18 +- .../audio/time_series_framer_calculator.cc | 245 ++++++++++++------ ...time_series_framer_calculator_benchmark.cc | 92 +++++++ 3 files changed, 268 insertions(+), 87 deletions(-) create mode 100644 mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc diff --git a/mediapipe/calculators/audio/BUILD b/mediapipe/calculators/audio/BUILD index 4a8f0f598..369c121e3 100644 --- a/mediapipe/calculators/audio/BUILD +++ b/mediapipe/calculators/audio/BUILD @@ -219,12 +219,10 @@ cc_library( deps = [ ":time_series_framer_calculator_cc_proto", "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:timestamp", "//mediapipe/framework/formats:matrix", "//mediapipe/framework/formats:time_series_header_cc_proto", - "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", - "//mediapipe/framework/port:status", "//mediapipe/util:time_series_util", "@com_google_audio_tools//audio/dsp:window_functions", "@eigen_archive//:eigen3", @@ -319,6 +317,20 @@ cc_test( ], ) +cc_binary( + name = "time_series_framer_calculator_benchmark", + srcs = ["time_series_framer_calculator_benchmark.cc"], + deps = [ + ":time_series_framer_calculator", + ":time_series_framer_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/formats:time_series_header_cc_proto", + "@com_google_benchmark//:benchmark", + ], +) + cc_test( name = "time_series_framer_calculator_test", srcs = ["time_series_framer_calculator_test.cc"], diff --git a/mediapipe/calculators/audio/time_series_framer_calculator.cc b/mediapipe/calculators/audio/time_series_framer_calculator.cc index a200b898a..1c9dd4ba7 100644 --- 
a/mediapipe/calculators/audio/time_series_framer_calculator.cc
+++ b/mediapipe/calculators/audio/time_series_framer_calculator.cc
@@ -15,9 +15,7 @@
 // Defines TimeSeriesFramerCalculator.
 #include <math.h>

-#include <deque>
-#include <memory>
-#include <string>
+#include <vector>

 #include "Eigen/Core"
 #include "audio/dsp/window_functions.h"
@@ -25,9 +23,8 @@
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/matrix.h"
 #include "mediapipe/framework/formats/time_series_header.pb.h"
-#include "mediapipe/framework/port/integral_types.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/timestamp.h"
 #include "mediapipe/util/time_series_util.h"

 namespace mediapipe {
@@ -88,11 +85,6 @@ class TimeSeriesFramerCalculator : public CalculatorBase {
   absl::Status Close(CalculatorContext* cc) override;

  private:
-  // Adds input data to the internal buffer.
-  void EnqueueInput(CalculatorContext* cc);
-  // Constructs and emits framed output packets.
-  void FrameOutput(CalculatorContext* cc);
-
   Timestamp CurrentOutputTimestamp() {
     if (use_local_timestamp_) {
       return current_timestamp_;
@@ -106,14 +98,6 @@
         Timestamp::kTimestampUnitsPerSecond);
   }

-  // Returns the timestamp of a sample on a base, which is usually the time
-  // stamp of a packet.
-  Timestamp CurrentSampleTimestamp(const Timestamp& timestamp_base,
-                                   int64_t number_of_samples) {
-    return timestamp_base + round(number_of_samples / sample_rate_ *
-                                  Timestamp::kTimestampUnitsPerSecond);
-  }
-
   // The number of input samples to advance after the current output frame is
   // emitted.
   int next_frame_step_samples() const {
@@ -142,61 +126,172 @@
   Timestamp initial_input_timestamp_;
   // The current timestamp is updated along with the incoming packets.
   Timestamp current_timestamp_;
-  int num_channels_;
-  // Each entry in this deque consists of a single sample, i.e. a
-  // single column vector, and its timestamp.
-  std::deque<std::pair<Matrix, Timestamp>> sample_buffer_;
+  // Samples are buffered in a vector of sample blocks.
+  class SampleBlockBuffer {
+   public:
+    // Initializes the buffer.
+    void Init(double sample_rate, int num_channels) {
+      ts_units_per_sample_ = Timestamp::kTimestampUnitsPerSecond / sample_rate;
+      num_channels_ = num_channels;
+      num_samples_ = 0;
+      first_block_offset_ = 0;
+    }
+
+    // Number of channels, equal to the number of rows in each Matrix.
+    int num_channels() const { return num_channels_; }
+    // Total number of available samples over all blocks.
+    int num_samples() const { return num_samples_; }
+
+    // Pushes a new block of samples on the back of the buffer with `timestamp`
+    // being the input timestamp of the packet containing the Matrix.
+    void Push(const Matrix& samples, Timestamp timestamp);
+    // Copies `count` samples from the front of the buffer. If there are fewer
+    // samples than this, the result is zero padded to have `count` samples.
+    // The timestamp of the last copied sample is written to *last_timestamp.
+    // This output is used below to update `current_timestamp_`, which is only
+    // used when `use_local_timestamp` is true.
+    Matrix CopySamples(int count, Timestamp* last_timestamp) const;
+    // Drops `count` samples from the front of the buffer. If `count` exceeds
+    // `num_samples()`, the buffer is emptied. Returns how many samples were
+    // dropped.
+    int DropSamples(int count);
+
+   private:
+    struct Block {
+      // Matrix of num_channels rows by num_samples columns, a block of possibly
+      // multiple samples.
+      Matrix samples;
+      // Timestamp of the first sample in the Block. This comes from the input
+      // packet's timestamp that contains this Matrix.
+      Timestamp timestamp;
+
+      Block() : timestamp(Timestamp::Unstarted()) {}
+      Block(const Matrix& samples, Timestamp timestamp)
+          : samples(samples), timestamp(timestamp) {}
+      int num_samples() const { return samples.cols(); }
+    };
+    std::vector<Block> blocks_;
+    // Number of timestamp units per sample. Used to compute timestamps as
+    // nth sample timestamp = base_timestamp + round(ts_units_per_sample_ * n).
+    double ts_units_per_sample_;
+    // Number of rows in each Matrix.
+    int num_channels_;
+    // The total number of samples over all blocks, equal to
+    // (sum_i blocks_[i].num_samples()) - first_block_offset_.
+    int num_samples_;
+    // The number of samples in the first block that have been discarded. This
+    // way we can cheaply represent "partially discarding" a block.
+    int first_block_offset_;
+  } sample_buffer_;

   bool use_window_;
-  Matrix window_;
+  Eigen::RowVectorXf window_;

   bool use_local_timestamp_;
 };
 REGISTER_CALCULATOR(TimeSeriesFramerCalculator);

-void TimeSeriesFramerCalculator::EnqueueInput(CalculatorContext* cc) {
-  const Matrix& input_frame = cc->Inputs().Index(0).Get<Matrix>();
-
-  for (int i = 0; i < input_frame.cols(); ++i) {
-    sample_buffer_.emplace_back(std::make_pair(
-        input_frame.col(i), CurrentSampleTimestamp(cc->InputTimestamp(), i)));
-  }
+void TimeSeriesFramerCalculator::SampleBlockBuffer::Push(const Matrix& samples,
+                                                         Timestamp timestamp) {
+  num_samples_ += samples.cols();
+  blocks_.emplace_back(samples, timestamp);
 }

-void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) {
-  while (sample_buffer_.size() >=
+Matrix TimeSeriesFramerCalculator::SampleBlockBuffer::CopySamples(
+    int count, Timestamp* last_timestamp) const {
+  Matrix copied(num_channels_, count);
+
+  if (!blocks_.empty()) {
+    int num_copied = 0;
+    // First block has an offset for samples that have been discarded.
+    int offset = first_block_offset_;
+    int n;
+    Timestamp last_block_ts;
+
+    for (auto it = blocks_.begin(); it != blocks_.end() && count > 0; ++it) {
+      n = std::min(it->num_samples() - offset, count);
+      // Copy `n` samples from the next block.
+      copied.middleCols(num_copied, n) = it->samples.middleCols(offset, n);
+      count -= n;
+      num_copied += n;
+      last_block_ts = it->timestamp;
+      offset = 0;  // No samples have been discarded in subsequent blocks.
+    }
+
+    // Compute the timestamp of the last copied sample.
+    *last_timestamp =
+        last_block_ts + std::round(ts_units_per_sample_ * (n - 1));
+  }
+
+  if (count > 0) {
+    copied.rightCols(count).setZero();  // Zero pad if needed.
+  }
+
+  return copied;
+}
+
+int TimeSeriesFramerCalculator::SampleBlockBuffer::DropSamples(int count) {
+  if (blocks_.empty()) {
+    return 0;
+  }
+
+  auto block_it = blocks_.begin();
+  if (first_block_offset_ + count < block_it->num_samples()) {
+    // `count` is less than the remaining samples in the first block.
+    first_block_offset_ += count;
+    num_samples_ -= count;
+    return count;
+  }
+
+  int num_samples_dropped = block_it->num_samples() - first_block_offset_;
+  count -= num_samples_dropped;
+  first_block_offset_ = 0;
+
+  for (++block_it; block_it != blocks_.end(); ++block_it) {
+    if (block_it->num_samples() > count) {
+      break;
+    }
+    num_samples_dropped += block_it->num_samples();
+    count -= block_it->num_samples();
+  }
+
+  blocks_.erase(blocks_.begin(), block_it);  // Drop whole blocks.
+  if (!blocks_.empty()) {
+    first_block_offset_ = count;  // Drop part of the next block.
+    num_samples_dropped += count;
+  }
+
+  num_samples_ -= num_samples_dropped;
+  return num_samples_dropped;
+}
+
+absl::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) {
+  if (initial_input_timestamp_ == Timestamp::Unstarted()) {
+    initial_input_timestamp_ = cc->InputTimestamp();
+    current_timestamp_ = initial_input_timestamp_;
+  }
+
+  // Add input data to the internal buffer.
+  sample_buffer_.Push(cc->Inputs().Index(0).Get<Matrix>(),
+                      cc->InputTimestamp());
+
+  // Construct and emit framed output packets.
+  while (sample_buffer_.num_samples() >=
          frame_duration_samples_ + samples_still_to_drop_) {
-    while (samples_still_to_drop_ > 0) {
-      sample_buffer_.pop_front();
-      --samples_still_to_drop_;
-    }
+    sample_buffer_.DropSamples(samples_still_to_drop_);
+    Matrix output_frame = sample_buffer_.CopySamples(frame_duration_samples_,
+                                                     &current_timestamp_);
     const int frame_step_samples = next_frame_step_samples();
-    std::unique_ptr<Matrix> output_frame(
-        new Matrix(num_channels_, frame_duration_samples_));
-    for (int i = 0; i < std::min(frame_step_samples, frame_duration_samples_);
-         ++i) {
-      output_frame->col(i) = sample_buffer_.front().first;
-      current_timestamp_ = sample_buffer_.front().second;
-      sample_buffer_.pop_front();
-    }
-    const int frame_overlap_samples =
-        frame_duration_samples_ - frame_step_samples;
-    if (frame_overlap_samples > 0) {
-      for (int i = 0; i < frame_overlap_samples; ++i) {
-        output_frame->col(i + frame_step_samples) = sample_buffer_[i].first;
-        current_timestamp_ = sample_buffer_[i].second;
-      }
-    } else {
-      samples_still_to_drop_ = -frame_overlap_samples;
-    }
+    samples_still_to_drop_ = frame_step_samples;

     if (use_window_) {
-      *output_frame = (output_frame->array() * window_.array()).matrix();
+      // Apply the window to each row of output_frame.
+      output_frame.array().rowwise() *= window_.array();
     }

-    cc->Outputs().Index(0).Add(output_frame.release(),
-                               CurrentOutputTimestamp());
+    cc->Outputs().Index(0).AddPacket(MakePacket<Matrix>(std::move(output_frame))
+                                         .At(CurrentOutputTimestamp()));
     ++cumulative_output_frames_;
     cumulative_completed_samples_ += frame_step_samples;
   }
@@ -206,35 +301,18 @@ void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) {
     // fact to enable packet queueing optimizations.
     cc->Outputs().Index(0).SetNextTimestampBound(CumulativeOutputTimestamp());
   }
-}
-
-absl::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) {
-  if (initial_input_timestamp_ == Timestamp::Unstarted()) {
-    initial_input_timestamp_ = cc->InputTimestamp();
-    current_timestamp_ = initial_input_timestamp_;
-  }
-
-  EnqueueInput(cc);
-  FrameOutput(cc);

   return absl::OkStatus();
 }

 absl::Status TimeSeriesFramerCalculator::Close(CalculatorContext* cc) {
-  while (samples_still_to_drop_ > 0 && !sample_buffer_.empty()) {
-    sample_buffer_.pop_front();
-    --samples_still_to_drop_;
-  }
-  if (!sample_buffer_.empty() && pad_final_packet_) {
-    std::unique_ptr<Matrix> output_frame(new Matrix);
-    output_frame->setZero(num_channels_, frame_duration_samples_);
-    for (int i = 0; i < sample_buffer_.size(); ++i) {
-      output_frame->col(i) = sample_buffer_[i].first;
-      current_timestamp_ = sample_buffer_[i].second;
-    }
+  sample_buffer_.DropSamples(samples_still_to_drop_);

-    cc->Outputs().Index(0).Add(output_frame.release(),
-                               CurrentOutputTimestamp());
+  if (sample_buffer_.num_samples() > 0 && pad_final_packet_) {
+    Matrix output_frame = sample_buffer_.CopySamples(frame_duration_samples_,
+                                                     &current_timestamp_);
+    cc->Outputs().Index(0).AddPacket(MakePacket<Matrix>(std::move(output_frame))
+                                         .At(CurrentOutputTimestamp()));
   }

   return absl::OkStatus();
@@ -258,7 +336,7 @@
       cc->Inputs().Index(0).Header(), &input_header));

   sample_rate_ = input_header.sample_rate();
-  num_channels_ = input_header.num_channels();
+  sample_buffer_.Init(sample_rate_, input_header.num_channels());
   frame_duration_samples_ = time_series_util::SecondsToSamples(
       framer_options.frame_duration_seconds(), sample_rate_);
   RET_CHECK_GT(frame_duration_samples_, 0)
@@ -312,9 +390,8 @@
   }

   if (use_window_) {
-    window_ = Matrix::Ones(num_channels_, 1) *
-              Eigen::Map<Eigen::RowVectorXd>(window_vector.data(), 1,
-                                             frame_duration_samples_)
+    window_ = Eigen::Map<Eigen::RowVectorXd>(window_vector.data(),
+                                             frame_duration_samples_)
                   .cast<float>();
   }
   use_local_timestamp_ = framer_options.use_local_timestamp();

diff --git a/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc b/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc
new file mode 100644
index 000000000..28e5b62c7
--- /dev/null
+++ b/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc
@@ -0,0 +1,92 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Benchmark for TimeSeriesFramerCalculator.
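+//
+// Each iteration streams 32 packets of random two-channel audio (roughly half
+// a second of samples per packet) through a single-calculator graph; input
+// packet preparation is excluded from the timed region.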
+#include <memory>
+#include <random>
+#include <vector>
+
+#include "benchmark/benchmark.h"
+#include "mediapipe/calculators/audio/time_series_framer_calculator.pb.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/matrix.h"
+#include "mediapipe/framework/formats/time_series_header.pb.h"
+#include "mediapipe/framework/packet.h"
+
+using ::mediapipe::Matrix;
+
+void BM_TimeSeriesFramerCalculator(benchmark::State& state) {
+  constexpr float kSampleRate = 32000.0;
+  constexpr int kNumChannels = 2;
+  constexpr int kFrameDurationSeconds = 5.0;
+  std::mt19937 rng(0 /*seed*/);
+  // Input around a half second's worth of samples at a time.
+  std::uniform_int_distribution<int> input_size_dist(15000, 17000);
+  // Generate a pool of random blocks of samples up front.
+  std::vector<Matrix> sample_pool;
+  sample_pool.reserve(20);
+  for (int i = 0; i < 20; ++i) {
+    sample_pool.push_back(Matrix::Random(kNumChannels, input_size_dist(rng)));
+  }
+  std::uniform_int_distribution<int> pool_index_dist(0, sample_pool.size() - 1);
+
+  mediapipe::CalculatorGraphConfig config;
+  config.add_input_stream("input");
+  config.add_output_stream("output");
+  auto* node = config.add_node();
+  node->set_calculator("TimeSeriesFramerCalculator");
+  node->add_input_stream("input");
+  node->add_output_stream("output");
+  mediapipe::TimeSeriesFramerCalculatorOptions* options =
+      node->mutable_options()->MutableExtension(
+          mediapipe::TimeSeriesFramerCalculatorOptions::ext);
+  options->set_frame_duration_seconds(kFrameDurationSeconds);
+
+  for (auto _ : state) {
+    state.PauseTiming();  // Pause benchmark timing.
+
+    // Prepare input packets of random blocks of samples.
+    std::vector<mediapipe::Packet> input_packets;
+    input_packets.reserve(32);
+    float t = 0;
+    for (int i = 0; i < 32; ++i) {
+      auto samples =
+          std::make_unique<Matrix>(sample_pool[pool_index_dist(rng)]);
+      const int num_samples = samples->cols();
+      input_packets.push_back(mediapipe::Adopt(samples.release())
+                                  .At(mediapipe::Timestamp::FromSeconds(t)));
+      t += num_samples / kSampleRate;
+    }
+    // Initialize graph.
+    mediapipe::CalculatorGraph graph;
+    CHECK_OK(graph.Initialize(config));
+    // Prepare input header.
+    auto header = std::make_unique<mediapipe::TimeSeriesHeader>();
+    header->set_sample_rate(kSampleRate);
+    header->set_num_channels(kNumChannels);
+
+    state.ResumeTiming();  // Resume benchmark timing.
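+    // The timed region below covers starting the graph, streaming all input
+    // packets, closing the input stream, and draining the graph to idle.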
+
+    CHECK_OK(graph.StartRun({}, {{"input", Adopt(header.release())}}));
+    for (auto& packet : input_packets) {
+      CHECK_OK(graph.AddPacketToInputStream("input", packet));
+    }
+    CHECK(!graph.HasError());
+    CHECK_OK(graph.CloseAllInputStreams());
+    CHECK_OK(graph.WaitUntilIdle());
+  }
+}
+BENCHMARK(BM_TimeSeriesFramerCalculator);
+
+BENCHMARK_MAIN();

From 7f39153ff35e747753b5c63f63fddd499f68570a Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Thu, 22 Jun 2023 17:44:07 +0530
Subject: [PATCH 091/106] Added MPPMask Tests

---
 mediapipe/tasks/ios/test/vision/core/BUILD    |  19 +++
 .../tasks/ios/test/vision/core/MPPMaskTests.m | 127 ++++++++++++++++++
 2 files changed, 146 insertions(+)
 create mode 100644 mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m

diff --git a/mediapipe/tasks/ios/test/vision/core/BUILD b/mediapipe/tasks/ios/test/vision/core/BUILD
index 5932968e5..e8c63f2f6 100644
--- a/mediapipe/tasks/ios/test/vision/core/BUILD
+++ b/mediapipe/tasks/ios/test/vision/core/BUILD
@@ -54,3 +54,22 @@ ios_unit_test(
         ":MPPImageObjcTestLibrary",
     ],
 )
+
+objc_library(
+    name = "MPPMaskObjcTestLibrary",
+    testonly = 1,
+    srcs = ["MPPMaskTests.m"],
+    deps = [
+        "//mediapipe/tasks/ios/vision/core:MPPMask",
+    ],
+)
+
+ios_unit_test(
+    name = "MPPMaskObjcTest",
+    minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION,
+    runner = tflite_ios_lab_runner("IOS_LATEST"),
+    tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS,
+    deps = [
+        ":MPPMaskObjcTestLibrary",
+    ],
+)

diff --git a/mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m b/mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m
new file mode 100644
index 000000000..05b8de023
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m
@@ -0,0 +1,127 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h"
+
+#import <XCTest/XCTest.h>
+
+/** Unit tests for `MPPMask`. */
+@interface MPPMaskTests : XCTestCase
+
+@end
+
+@implementation MPPMaskTests
+
+#pragma mark - Tests
+
+- (void)testInitWithUInt8ArrayNoCopySucceeds {
+
+  NSInteger width = 2;
+  NSInteger height = 3;
+
+  UInt8 uint8Data[] = {128, 128, 128, 128, 128, 128};
+  float float32Data[] = {0.501f, 0.501f, 0.501f, 0.501f, 0.501f, 0.501f};
+
+  MPPMask *mask = [[MPPMask alloc] initWithUInt8Data:&uint8Data width:width height:height shouldCopy:NO];
+
+  XCTAssertEqual(mask.width, width);
+  XCTAssertEqual(mask.height, height);
+
+  // Test if UInt8 mask is not copied.
+  XCTAssertEqual(mask.uint8Data, &uint8Data);
+  XCTAssertNotEqual(mask.float32Data, NULL);
+
+  for (int i = 0 ; i < width * height ; i ++) {
+    XCTAssertEqualWithAccuracy(mask.float32Data[i], float32Data[i], 1e-3f, @"index i = %d", i);
+  }
+
+  // Test if repeated Float32 mask accesses return the same array in memory.
+ XCTAssertEqual(mask.float32Data, mask.float32Data); +} + +- (void)testInitWithUInt8ArrayCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {128, 128, 128, 128, 128, 128}; + float float32Data[] = {0.501f, 0.501f, 0.501f, 0.501f, 0.501f, 0.501f}; + + MPPMask *mask = [[MPPMask alloc] initWithUInt8Data:&uint8Data width:width height:height shouldCopy:YES]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if UInt8 mask is copied. + XCTAssertNotEqual(mask.uint8Data, &uint8Data); + XCTAssertNotEqual(mask.float32Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqualWithAccuracy(mask.float32Data[i], float32Data[i], 1e-3f); + } + + // Test if repeated Float32 mask accesses return the same array in memory. + XCTAssertEqual(mask.float32Data, mask.float32Data); +} + +- (void)testInitWithFloat32ArrayNoCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {132, 132, 132, 132, 132, 132}; + float float32Data[] = {0.52f, 0.52f, 0.52f, 0.52f, 0.52f, 0.52f}; + MPPMask *mask = [[MPPMask alloc] initWithFloat32Data:&float32Data width:width height:height shouldCopy:NO]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if Float32 mask is not copied. + XCTAssertEqual(mask.float32Data, &float32Data); + XCTAssertNotEqual(mask.uint8Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqual(mask.uint8Data[i], uint8Data[i]); + } + + // Test if repeated UInt8 mask accesses return the same array in memory. + XCTAssertEqual(mask.uint8Data, mask.uint8Data); +} + +- (void)testInitWithFloat32ArrayCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {132, 132, 132, 132, 132, 132}; + float float32Data[] = {0.52f, 0.52f, 0.52f, 0.52f, 0.52f, 0.52f}; + + MPPMask *mask = [[MPPMask alloc] initWithFloat32Data:&float32Data width:width height:height shouldCopy:YES]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if Float32 mask is copied. + XCTAssertNotEqual(mask.float32Data, &float32Data); + XCTAssertNotEqual(mask.uint8Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqual(mask.uint8Data[i], uint8Data[i]); + } + + // Test if repeated UInt8 mask accesses return the same array in memory. + XCTAssertEqual(mask.uint8Data, mask.uint8Data); +} + +@end From ba7e0e0e501c857da6fc675f564a707b306d4cb3 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 22 Jun 2023 07:55:09 -0700 Subject: [PATCH 092/106] Add a face alignment preprocessor to face stylizer. 
PiperOrigin-RevId: 542559764 --- .../python/vision/face_stylizer/BUILD | 22 ++----- .../python/vision/face_stylizer/constants.py | 6 ++ .../python/vision/face_stylizer/dataset.py | 33 ++++++++-- .../vision/face_stylizer/dataset_test.py | 6 +- .../vision/face_stylizer/face_stylizer.py | 5 +- mediapipe/tasks/python/core/BUILD | 2 +- mediapipe/tasks/python/test/test_utils.py | 66 +++++++++++-------- 7 files changed, 82 insertions(+), 58 deletions(-) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/BUILD b/mediapipe/model_maker/python/vision/face_stylizer/BUILD index a2e30a112..29c30c873 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/BUILD +++ b/mediapipe/model_maker/python/vision/face_stylizer/BUILD @@ -20,13 +20,6 @@ licenses(["notice"]) package(default_visibility = ["//mediapipe:__subpackages__"]) -filegroup( - name = "testdata", - srcs = glob([ - "testdata/**", - ]), -) - py_library( name = "constants", srcs = ["constants.py"], @@ -72,18 +65,11 @@ py_library( name = "dataset", srcs = ["dataset.py"], deps = [ + ":constants", "//mediapipe/model_maker/python/core/data:classification_dataset", - "//mediapipe/model_maker/python/vision/core:image_utils", - ], -) - -py_test( - name = "dataset_test", - srcs = ["dataset_test.py"], - data = [":testdata"], - deps = [ - ":dataset", - "//mediapipe/tasks/python/test:test_utils", + "//mediapipe/python:_framework_bindings", + "//mediapipe/tasks/python/core:base_options", + "//mediapipe/tasks/python/vision:face_aligner", ], ) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/constants.py b/mediapipe/model_maker/python/vision/face_stylizer/constants.py index e7a03aebd..ac7675232 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/constants.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/constants.py @@ -41,5 +41,11 @@ FACE_STYLIZER_W_FILES = file_util.DownloadedFiles( 'https://storage.googleapis.com/mediapipe-assets/face_stylizer_w_avg.npy', ) +FACE_ALIGNER_TASK_FILES = file_util.DownloadedFiles( + 'face_stylizer/face_landmarker_v2.task', + 'https://storage.googleapis.com/mediapipe-assets/face_landmarker_v2.task', + is_folder=False, +) + # Dimension of the input style vector to the decoder STYLE_DIM = 512 diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py index d517fd9c1..93478de1b 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py @@ -13,13 +13,37 @@ # limitations under the License. 
"""Face stylizer dataset library.""" +from typing import Sequence import logging import os import tensorflow as tf from mediapipe.model_maker.python.core.data import classification_dataset -from mediapipe.model_maker.python.vision.core import image_utils +from mediapipe.model_maker.python.vision.face_stylizer import constants +from mediapipe.python._framework_bindings import image as image_module +from mediapipe.tasks.python.core import base_options as base_options_module +from mediapipe.tasks.python.vision import face_aligner + + +def _preprocess_face_dataset( + all_image_paths: Sequence[str], +) -> Sequence[tf.Tensor]: + """Preprocess face image dataset by aligning the face.""" + path = constants.FACE_ALIGNER_TASK_FILES.get_path() + base_options = base_options_module.BaseOptions(model_asset_path=path) + options = face_aligner.FaceAlignerOptions(base_options=base_options) + aligner = face_aligner.FaceAligner.create_from_options(options) + + preprocessed_images = [] + for path in all_image_paths: + tf.compat.v1.logging.info('Preprocess image %s', path) + image = image_module.Image.create_from_file(path) + aligned_image = aligner.align(image) + aligned_image_tensor = tf.convert_to_tensor(aligned_image.numpy_view()) + preprocessed_images.append(aligned_image_tensor) + + return preprocessed_images # TODO: Change to a unlabeled dataset if it makes sense. @@ -58,6 +82,7 @@ class Dataset(classification_dataset.ClassificationDataset): ): raise ValueError('No images found under given directory') + image_data = _preprocess_face_dataset(all_image_paths) label_names = sorted( name for name in os.listdir(data_root) @@ -73,11 +98,7 @@ class Dataset(classification_dataset.ClassificationDataset): for path in all_image_paths ] - path_ds = tf.data.Dataset.from_tensor_slices(all_image_paths) - - image_ds = path_ds.map( - image_utils.load_image, num_parallel_calls=tf.data.AUTOTUNE - ) + image_ds = tf.data.Dataset.from_tensor_slices(image_data) # Load label label_ds = tf.data.Dataset.from_tensor_slices( diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py index 73140f30e..900371de1 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import numpy as np import tensorflow as tf +from mediapipe.model_maker.python.vision.core import image_utils from mediapipe.model_maker.python.vision.face_stylizer import dataset from mediapipe.tasks.python.test import test_utils @@ -22,10 +24,10 @@ class DatasetTest(tf.test.TestCase): def setUp(self): super().setUp() - self._test_data_dirname = 'input/style' def test_from_folder(self): - input_data_dir = test_utils.get_test_data_path(self._test_data_dirname) + test_data_dirname = 'input/style' + input_data_dir = test_utils.get_test_data_path(test_data_dirname) data = dataset.Dataset.from_folder(dirname=input_data_dir) self.assertEqual(data.num_classes, 2) self.assertEqual(data.label_names, ['cartoon', 'sketch']) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py index 5758ac7b5..dfa8a04b4 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py @@ -14,7 +14,7 @@ """APIs to train face stylization model.""" import os -from typing import Callable, Optional +from typing import Any, Callable, Optional import numpy as np import tensorflow as tf @@ -54,7 +54,6 @@ class FaceStylizer(object): self._model_spec = model_spec self._model_options = model_options self._hparams = hparams - # TODO: Support face alignment in image preprocessor. self._preprocessor = image_preprocessing.Preprocessor( input_shape=self._model_spec.input_image_shape, num_classes=1, @@ -128,7 +127,7 @@ class FaceStylizer(object): def _train_model( self, train_data: classification_ds.ClassificationDataset, - preprocessor: Optional[Callable[..., bool]] = None, + preprocessor: Optional[Callable[..., Any]] = None, ): """Trains the face stylizer model. diff --git a/mediapipe/tasks/python/core/BUILD b/mediapipe/tasks/python/core/BUILD index 76791c232..9d2dc3f0b 100644 --- a/mediapipe/tasks/python/core/BUILD +++ b/mediapipe/tasks/python/core/BUILD @@ -29,7 +29,7 @@ py_library( name = "base_options", srcs = ["base_options.py"], visibility = [ - "//mediapipe/model_maker/python/vision/gesture_recognizer:__subpackages__", + "//mediapipe/model_maker:__subpackages__", "//mediapipe/tasks:users", ], deps = [ diff --git a/mediapipe/tasks/python/test/test_utils.py b/mediapipe/tasks/python/test/test_utils.py index 2dfc5a8c4..e790b9156 100644 --- a/mediapipe/tasks/python/test/test_utils.py +++ b/mediapipe/tasks/python/test/test_utils.py @@ -22,7 +22,6 @@ import six from google.protobuf import descriptor from google.protobuf import descriptor_pool from google.protobuf import text_format - from mediapipe.python._framework_bindings import image as image_module from mediapipe.python._framework_bindings import image_frame as image_frame_module @@ -44,18 +43,21 @@ def test_srcdir(): def get_test_data_path(file_or_dirname_path: str) -> str: """Returns full test data path.""" - for (directory, subdirs, files) in os.walk(test_srcdir()): + for directory, subdirs, files in os.walk(test_srcdir()): for f in subdirs + files: path = os.path.join(directory, f) if path.endswith(file_or_dirname_path): return path - raise ValueError("No %s in test directory: %s." % - (file_or_dirname_path, test_srcdir())) + raise ValueError( + "No %s in test directory: %s." 
% (file_or_dirname_path, test_srcdir()) + ) -def create_calibration_file(file_dir: str, - file_name: str = "score_calibration.txt", - content: str = "1.0,2.0,3.0,4.0") -> str: +def create_calibration_file( + file_dir: str, + file_name: str = "score_calibration.txt", + content: str = "1.0,2.0,3.0,4.0", +) -> str: """Creates the calibration file.""" calibration_file = os.path.join(file_dir, file_name) with open(calibration_file, mode="w") as file: @@ -63,12 +65,9 @@ def create_calibration_file(file_dir: str, return calibration_file -def assert_proto_equals(self, - a, - b, - check_initialized=True, - normalize_numbers=True, - msg=None): +def assert_proto_equals( + self, a, b, check_initialized=True, normalize_numbers=True, msg=None +): """assert_proto_equals() is useful for unit tests. It produces much more helpful output than assertEqual() for proto2 messages. @@ -113,7 +112,8 @@ def assert_proto_equals(self, self.assertMultiLineEqual(a_str, b_str, msg=msg) else: diff = "".join( - difflib.unified_diff(a_str.splitlines(True), b_str.splitlines(True))) + difflib.unified_diff(a_str.splitlines(True), b_str.splitlines(True)) + ) if diff: self.fail("%s :\n%s" % (msg, diff)) @@ -147,14 +147,18 @@ def _normalize_number_fields(pb): # We force 32-bit values to int and 64-bit values to long to make # alternate implementations where the distinction is more significant # (e.g. the C++ implementation) simpler. - if desc.type in (descriptor.FieldDescriptor.TYPE_INT64, - descriptor.FieldDescriptor.TYPE_UINT64, - descriptor.FieldDescriptor.TYPE_SINT64): + if desc.type in ( + descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_SINT64, + ): normalized_values = [int(x) for x in values] - elif desc.type in (descriptor.FieldDescriptor.TYPE_INT32, - descriptor.FieldDescriptor.TYPE_UINT32, - descriptor.FieldDescriptor.TYPE_SINT32, - descriptor.FieldDescriptor.TYPE_ENUM): + elif desc.type in ( + descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_ENUM, + ): normalized_values = [int(x) for x in values] elif desc.type == descriptor.FieldDescriptor.TYPE_FLOAT: normalized_values = [round(x, 4) for x in values] @@ -168,14 +172,20 @@ def _normalize_number_fields(pb): else: setattr(pb, desc.name, normalized_values[0]) - if (desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE or - desc.type == descriptor.FieldDescriptor.TYPE_GROUP): - if (desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - desc.message_type.has_options and - desc.message_type.GetOptions().map_entry): + if ( + desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or desc.type == descriptor.FieldDescriptor.TYPE_GROUP + ): + if ( + desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE + and desc.message_type.has_options + and desc.message_type.GetOptions().map_entry + ): # This is a map, only recurse if the values have a message type. - if (desc.message_type.fields_by_number[2].type == - descriptor.FieldDescriptor.TYPE_MESSAGE): + if ( + desc.message_type.fields_by_number[2].type + == descriptor.FieldDescriptor.TYPE_MESSAGE + ): for v in six.itervalues(values): _normalize_number_fields(v) else: From 98d493f37a2ba1f651aba9fa41e9be110f6ee3cc Mon Sep 17 00:00:00 2001 From: Jiuqiang Tang Date: Thu, 22 Jun 2023 11:24:00 -0700 Subject: [PATCH 093/106] Add MatrixData as a packet option for ConstantSidePacketCalculatorOptions. 
PiperOrigin-RevId: 542616847
---
 mediapipe/calculators/core/BUILD                              | 2 ++
 .../calculators/core/constant_side_packet_calculator.cc       | 5 +++++
 .../calculators/core/constant_side_packet_calculator.proto    | 2 ++
 3 files changed, 9 insertions(+)

diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD
index a425b7e38..99a63f633 100644
--- a/mediapipe/calculators/core/BUILD
+++ b/mediapipe/calculators/core/BUILD
@@ -117,6 +117,7 @@ mediapipe_proto_library(
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/framework/formats:classification_proto",
         "//mediapipe/framework/formats:landmark_proto",
+        "//mediapipe/framework/formats:matrix_data_proto",
         "//mediapipe/framework/formats:time_series_header_proto",
     ],
 )
@@ -1168,6 +1169,7 @@ cc_library(
         "//mediapipe/framework:collection_item_id",
         "//mediapipe/framework/formats:classification_cc_proto",
         "//mediapipe/framework/formats:landmark_cc_proto",
+        "//mediapipe/framework/formats:matrix_data_cc_proto",
         "//mediapipe/framework/formats:time_series_header_cc_proto",
         "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:ret_check",

diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.cc b/mediapipe/calculators/core/constant_side_packet_calculator.cc
index 0bcf22ec9..8762c9874 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator.cc
+++ b/mediapipe/calculators/core/constant_side_packet_calculator.cc
@@ -19,6 +19,7 @@
 #include "mediapipe/framework/collection_item_id.h"
 #include "mediapipe/framework/formats/classification.pb.h"
 #include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/formats/matrix_data.pb.h"
 #include "mediapipe/framework/formats/time_series_header.pb.h"
 #include "mediapipe/framework/port/canonical_errors.h"
 #include "mediapipe/framework/port/integral_types.h"
@@ -85,6 +86,8 @@ class ConstantSidePacketCalculator : public CalculatorBase {
       packet.Set<LandmarkList>();
     } else if (packet_options.has_double_value()) {
       packet.Set<double>();
+    } else if (packet_options.has_matrix_data_value()) {
+      packet.Set<MatrixData>();
     } else if (packet_options.has_time_series_header_value()) {
       packet.Set<TimeSeriesHeader>();
     } else if (packet_options.has_int64_value()) {
@@ -123,6 +126,8 @@
           MakePacket<LandmarkList>(packet_options.landmark_list_value()));
     } else if (packet_options.has_double_value()) {
       packet.Set(MakePacket<double>(packet_options.double_value()));
+    } else if (packet_options.has_matrix_data_value()) {
+      packet.Set(MakePacket<MatrixData>(packet_options.matrix_data_value()));
     } else if (packet_options.has_time_series_header_value()) {
       packet.Set(MakePacket<TimeSeriesHeader>(
           packet_options.time_series_header_value()));

diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.proto b/mediapipe/calculators/core/constant_side_packet_calculator.proto
index bce827055..0d53175fc 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator.proto
+++ b/mediapipe/calculators/core/constant_side_packet_calculator.proto
@@ -19,6 +19,7 @@ package mediapipe;
 import "mediapipe/framework/calculator.proto";
 import "mediapipe/framework/formats/classification.proto";
 import "mediapipe/framework/formats/landmark.proto";
+import "mediapipe/framework/formats/matrix_data.proto";
 import "mediapipe/framework/formats/time_series_header.proto";

 message ConstantSidePacketCalculatorOptions {
@@ -38,6 +39,7 @@ message ConstantSidePacketCalculatorOptions {
     ClassificationList classification_list_value = 6;
     LandmarkList landmark_list_value = 7;
     TimeSeriesHeader time_series_header_value = 10;
+    MatrixData matrix_data_value = 12;
   }
 }

From 2f5fc16a3815c15626455f376264f5e1197b75e9 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 22 Jun 2023 12:58:43 -0700
Subject: [PATCH 094/106] Fix timestamp computation when copying within first
 block.

When computing the last copied sample's timestamp, first_block_offset_ needs
to be taken into account.

PiperOrigin-RevId: 542643291
---
 mediapipe/calculators/audio/time_series_framer_calculator.cc | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/mediapipe/calculators/audio/time_series_framer_calculator.cc b/mediapipe/calculators/audio/time_series_framer_calculator.cc
index 1c9dd4ba7..2911c5720 100644
--- a/mediapipe/calculators/audio/time_series_framer_calculator.cc
+++ b/mediapipe/calculators/audio/time_series_framer_calculator.cc
@@ -208,6 +208,7 @@ Matrix TimeSeriesFramerCalculator::SampleBlockBuffer::CopySamples(
     int offset = first_block_offset_;
     int n;
     Timestamp last_block_ts;
+    int last_sample_index;

     for (auto it = blocks_.begin(); it != blocks_.end() && count > 0; ++it) {
       n = std::min(it->num_samples() - offset, count);
@@ -216,12 +217,13 @@
       count -= n;
       num_copied += n;
       last_block_ts = it->timestamp;
+      last_sample_index = offset + n - 1;
       offset = 0;  // No samples have been discarded in subsequent blocks.
     }

     // Compute the timestamp of the last copied sample.
     *last_timestamp =
-        last_block_ts + std::round(ts_units_per_sample_ * (n - 1));
+        last_block_ts + std::round(ts_units_per_sample_ * last_sample_index);
   }

   if (count > 0) {

From 4e862995ba32e490d56bafec7869f6ea90001326 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 22 Jun 2023 13:59:27 -0700
Subject: [PATCH 095/106] Fix typo

PiperOrigin-RevId: 542660548
---
 mediapipe/tasks/python/metadata/metadata.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mediapipe/tasks/python/metadata/metadata.py b/mediapipe/tasks/python/metadata/metadata.py
index e888a9d11..c7375232c 100644
--- a/mediapipe/tasks/python/metadata/metadata.py
+++ b/mediapipe/tasks/python/metadata/metadata.py
@@ -737,7 +737,7 @@ class MetadataDisplayer(object):
     metadata_buffer = get_metadata_buffer(model_buffer)
     if not metadata_buffer:
       raise ValueError("The model does not have metadata.")
-    associated_file_list = cls._parse_packed_associted_file_list(model_buffer)
+    associated_file_list = cls._parse_packed_associated_file_list(model_buffer)
     return cls(model_buffer, metadata_buffer, associated_file_list)

   def get_associated_file_buffer(self, filename):
@@ -775,8 +775,8 @@
     """
     return copy.deepcopy(self._associated_file_list)

-  @staticmethod
-  def _parse_packed_associted_file_list(model_buf):
+  @classmethod
+  def _parse_packed_associated_file_list(cls, model_buf):
     """Gets a list of associated files packed to the model file.
From 4e862995ba32e490d56bafec7869f6ea90001326 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 22 Jun 2023 13:59:27 -0700
Subject: [PATCH 095/106] Fix typo

PiperOrigin-RevId: 542660548
---
 mediapipe/tasks/python/metadata/metadata.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mediapipe/tasks/python/metadata/metadata.py b/mediapipe/tasks/python/metadata/metadata.py
index e888a9d11..c7375232c 100644
--- a/mediapipe/tasks/python/metadata/metadata.py
+++ b/mediapipe/tasks/python/metadata/metadata.py
@@ -737,7 +737,7 @@ class MetadataDisplayer(object):
     metadata_buffer = get_metadata_buffer(model_buffer)
     if not metadata_buffer:
       raise ValueError("The model does not have metadata.")
-    associated_file_list = cls._parse_packed_associted_file_list(model_buffer)
+    associated_file_list = cls._parse_packed_associated_file_list(model_buffer)
     return cls(model_buffer, metadata_buffer, associated_file_list)
 
   def get_associated_file_buffer(self, filename):
@@ -775,8 +775,8 @@ class MetadataDisplayer(object):
     """
     return copy.deepcopy(self._associated_file_list)
 
-  @staticmethod
-  def _parse_packed_associted_file_list(model_buf):
+  @classmethod
+  def _parse_packed_associated_file_list(cls, model_buf):
     """Gets a list of associated files packed to the model file.
 
     Args:

From a8899da45a141bd5b5549ab021913ec25d58bd01 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 22 Jun 2023 14:47:11 -0700
Subject: [PATCH 096/106] Fix -Wsign-compare warning in api2/builder.h

PiperOrigin-RevId: 542673286
---
 mediapipe/framework/api2/builder.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/framework/api2/builder.h b/mediapipe/framework/api2/builder.h
index 51e59973c..0c4c82f37 100644
--- a/mediapipe/framework/api2/builder.h
+++ b/mediapipe/framework/api2/builder.h
@@ -32,7 +32,7 @@ template <class T>
 struct dependent_false : std::false_type {};
 
 template <typename T>
-T& GetWithAutoGrow(std::vector<std::unique_ptr<T>>* vecp, int index) {
+T& GetWithAutoGrow(std::vector<std::unique_ptr<T>>* vecp, size_t index) {
   auto& vec = *vecp;
   if (vec.size() <= index) {
     vec.resize(index + 1);
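For readers who want the warning itself spelled out, here is a minimal sketch (editor-added, independent of the patch) of what `-Wsign-compare` catches in this pattern: comparing the unsigned `vec.size()` against a signed `int` converts the `int` to `std::size_t`, so a negative index would silently wrap to a huge value. Taking `std::size_t` directly, as the fix does, makes the comparison homogeneous.

#include <cstddef>
#include <vector>

// Before: `index` is signed while `vec.size()` is unsigned, so the comparison
// converts `index` to std::size_t; a negative index would wrap to a huge value.
bool NeedsGrowBefore(const std::vector<int>& vec, int index) {
  return vec.size() <= index;  // -Wsign-compare fires here
}

// After: an unsigned parameter makes the comparison homogeneous and silent.
bool NeedsGrowAfter(const std::vector<int>& vec, std::size_t index) {
  return vec.size() <= index;
}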
From 7fe365489dfd2f6726a9db734b244d74e778e4fb Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 23 Jun 2023 20:09:05 +0530
Subject: [PATCH 097/106] Added iOS Image Segmenter Result

---
 .../tasks/ios/vision/image_segmenter/BUILD | 28 ++++++++
 .../sources/MPPImageSegmenterResult.h      | 65 +++++++++++++++++++
 .../sources/MPPImageSegmenterResult.m      | 32 +++++++++
 3 files changed, 125 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/BUILD
 create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h
 create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m

diff --git a/mediapipe/tasks/ios/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/BUILD
new file mode 100644
index 000000000..c0700a8d9
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/image_segmenter/BUILD
@@ -0,0 +1,28 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(default_visibility = ["//mediapipe/tasks:internal"])
+
+licenses(["notice"])
+
+objc_library(
+    name = "MPPImageSegmenterResult",
+    srcs = ["sources/MPPImageSegmenterResult.m"],
+    hdrs = ["sources/MPPImageSegmenterResult.h"],
+    deps = [
+        "//mediapipe/tasks/ios/vision/core:MPPMask",
+        "//mediapipe/tasks/ios/core:MPPTaskResult",
+    ],
+)
+
diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h
new file mode 100644
index 000000000..c0c299f77
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h
@@ -0,0 +1,65 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#import "mediapipe/tasks/ios/core/sources/MPPTaskResult.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Represents the segmentation results generated by `MPPImageSegmenter`. */
+NS_SWIFT_NAME(ImageSegmenterResult)
+@interface MPPImageSegmenterResult : MPPTaskResult
+
+/** An optional array of `MPPMask` objects. Each `MPPMask` in the array holds a 32 bit float array
+ * of size `image width` * `image height` which represents the confidence mask for each category.
+ * Each element of the float array represents the confidence with which the model predicted that
+ * the corresponding pixel belongs to the category that the mask represents, usually in the range
+ * [0, 1]. */
+@property(nonatomic, readonly, nullable) NSArray<MPPMask *> *confidenceMasks;
+
+/** An optional `MPPMask` that holds a `UInt8` array of size `image width` * `image height`. Each
+ * element of this array represents the class to which the pixel in the original image was
+ * predicted to belong. */
+@property(nonatomic, readonly, nullable) MPPMask *categoryMask;
+
+/**
+ * The quality scores of the result masks, in the range of [0, 1]. Defaults
+ * to `1` if the model doesn't output quality scores. Each element corresponds to the score of
+ * the category in the model outputs.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSNumber *> *qualityScores;
+
+/**
+ * Initializes a new `MPPImageSegmenterResult` with the given array of confidence masks, category
+ * mask, quality scores and timestamp (in milliseconds).
+ *
+ * @param confidenceMasks An optional array of `MPPMask` objects. Each `MPPMask` in the array must
+ * be of type `MPPMaskDataTypeFloat32`.
+ * @param categoryMask An optional `MPPMask` object of type `MPPMaskDataTypeUInt8`.
+ * @param qualityScores The quality scores of the result masks of type NSArray<NSNumber *> *. Each
+ * `NSNumber` in the array holds a `float`.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
+ *
+ * @return An instance of `MPPImageSegmenterResult` initialized with the given array of confidence
+ * masks, category mask, quality scores and timestamp (in milliseconds).
+ */
+- (instancetype)initWithConfidenceMasks:(nullable NSArray<MPPMask *> *)confidenceMasks
+                           categoryMask:(nullable MPPMask *)categoryMask
+                          qualityScores:(nullable NSArray<NSNumber *> *)qualityScores
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m
new file mode 100644
index 000000000..2b11fc160
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m
@@ -0,0 +1,32 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h"
+
+@implementation MPPImageSegmenterResult
+
+- (instancetype)initWithConfidenceMasks:(NSArray<MPPMask *> *)confidenceMasks
+                           categoryMask:(MPPMask *)categoryMask
+                          qualityScores:(NSArray<NSNumber *> *)qualityScores
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
+  if (self) {
+    _confidenceMasks = confidenceMasks;
+    _categoryMask = categoryMask;
+    _qualityScores = qualityScores;
+  }
+  return self;
+}
+
+@end

From 7623c5a9410068deb4410dc7aad7f130b0eaabe6 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 23 Jun 2023 20:09:18 +0530
Subject: [PATCH 098/106] Added iOS Image Segmenter Options

---
 .../tasks/ios/vision/image_segmenter/BUILD | 10 ++
 .../sources/MPPImageSegmenterOptions.h     | 99 +++++++++++++++++++
 .../sources/MPPImageSegmenterOptions.m     | 40 ++++++++
 3 files changed, 149 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h
 create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m

diff --git a/mediapipe/tasks/ios/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/BUILD
index c0700a8d9..eb6411852 100644
--- a/mediapipe/tasks/ios/vision/image_segmenter/BUILD
+++ b/mediapipe/tasks/ios/vision/image_segmenter/BUILD
@@ -26,3 +26,13 @@ objc_library(
     ],
 )
 
+objc_library(
+    name = "MPPImageSegmenterOptions",
+    srcs = ["sources/MPPImageSegmenterOptions.m"],
+    hdrs = ["sources/MPPImageSegmenterOptions.h"],
+    deps = [
+        ":MPPImageSegmenterResult",
+        "//mediapipe/tasks/ios/core:MPPTaskOptions",
+        "//mediapipe/tasks/ios/vision/core:MPPRunningMode",
+    ],
+)
diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h
new file mode 100644
index 000000000..65a822c1e
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h
@@ -0,0 +1,99 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#import "mediapipe/tasks/ios/core/sources/MPPTaskOptions.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPRunningMode.h"
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class MPPImageSegmenter;
+
+/**
+ * This protocol defines an interface for the delegates of `MPPImageSegmenter` object to receive
+ * results of performing asynchronous segmentation on images (i.e, when `runningMode` =
+ * `MPPRunningModeLiveStream`).
+ *
+ * The delegate of `MPPImageSegmenter` must adopt the `MPPImageSegmenterLiveStreamDelegate`
+ * protocol. The methods in this protocol are optional.
+ */
+NS_SWIFT_NAME(ImageSegmenterLiveStreamDelegate)
+@protocol MPPImageSegmenterLiveStreamDelegate <NSObject>
+
+@optional
+
+/**
+ * This method notifies a delegate that the results of asynchronous segmentation of
+ * an image submitted to the `MPPImageSegmenter` are available.
+ *
+ * This method is called on a private serial dispatch queue created by the `MPPImageSegmenter`
+ * for performing the asynchronous delegate calls.
+ *
+ * @param imageSegmenter The image segmenter which performed the segmentation.
+ * This is useful to test equality when there are multiple instances of `MPPImageSegmenter`.
+ * @param result The `MPPImageSegmenterResult` object that contains a list of category or
+ * confidence masks and optional quality scores.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image was sent to the image segmenter.
+ * @param error An optional error parameter populated when there is an error in performing
+ * segmentation on the input live stream image data.
+ */
+- (void)imageSegmenter:(MPPImageSegmenter *)imageSegmenter
+    didFinishSegmentationWithResult:(nullable MPPImageSegmenterResult *)result
+            timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                              error:(nullable NSError *)error
+    NS_SWIFT_NAME(imageSegmenter(_:didFinishSegmentation:timestampInMilliseconds:error:));
+@end
+
+/** Options for setting up a `MPPImageSegmenter`. */
+NS_SWIFT_NAME(ImageSegmenterOptions)
+@interface MPPImageSegmenterOptions : MPPTaskOptions <NSCopying>
+
+/**
+ * Running mode of the image segmenter task. Defaults to `MPPRunningModeImage`.
+ * `MPPImageSegmenter` can be created with one of the following running modes:
+ *  1. `MPPRunningModeImage`: The mode for performing segmentation on single image inputs.
+ *  2. `MPPRunningModeVideo`: The mode for performing segmentation on the decoded frames of a
+ *  video.
+ *  3. `MPPRunningModeLiveStream`: The mode for performing segmentation on a live stream of
+ *  input data, such as from the camera.
+ */
+@property(nonatomic) MPPRunningMode runningMode;
+
+/**
+ * An object that conforms to the `MPPImageSegmenterLiveStreamDelegate` protocol. This object must
+ * implement `imageSegmenter:didFinishSegmentationWithResult:timestampInMilliseconds:error:` to
+ * receive the results of performing asynchronous segmentation on images (i.e, when `runningMode` =
+ * `MPPRunningModeLiveStream`).
+ */
+@property(nonatomic, weak, nullable) id<MPPImageSegmenterLiveStreamDelegate>
+    imageSegmenterLiveStreamDelegate;
+
+/**
+ * The locale to use for display names specified through the TFLite Model Metadata, if any.
+ * Defaults to English.
+ */
+@property(nonatomic, copy) NSString *displayNamesLocale;
+
+/** Represents whether to output confidence masks. */
+@property(nonatomic) BOOL shouldOutputConfidenceMasks;
+
+/** Represents whether to output the category mask. */
+@property(nonatomic) BOOL shouldOutputCategoryMasks;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m
new file mode 100644
index 000000000..282a729bb
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m
@@ -0,0 +1,40 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h"
+
+@implementation MPPImageSegmenterOptions
+
+- (instancetype)init {
+  self = [super init];
+  if (self) {
+    _displayNamesLocale = @"en";
+    _shouldOutputConfidenceMasks = YES;
+  }
+  return self;
+}
+
+- (id)copyWithZone:(NSZone *)zone {
+  MPPImageSegmenterOptions *imageSegmenterOptions = [super copyWithZone:zone];
+
+  imageSegmenterOptions.runningMode = self.runningMode;
+  imageSegmenterOptions.shouldOutputConfidenceMasks = self.shouldOutputConfidenceMasks;
+  imageSegmenterOptions.shouldOutputCategoryMasks = self.shouldOutputCategoryMasks;
+  imageSegmenterOptions.displayNamesLocale = self.displayNamesLocale;
+  imageSegmenterOptions.imageSegmenterLiveStreamDelegate = self.imageSegmenterLiveStreamDelegate;
+
+  return imageSegmenterOptions;
+}
+
+@end

From 5dce8f283dbd6be4cff8521866b707b2bd25113f Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 23 Jun 2023 20:10:42 +0530
Subject: [PATCH 099/106] Updated image segmenter delegate method to be required

---
 .../vision/image_segmenter/sources/MPPImageSegmenterOptions.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h
index 65a822c1e..31ae45b6c 100644
--- a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h
+++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h
@@ -33,7 +33,7 @@ NS_ASSUME_NONNULL_BEGIN
 NS_SWIFT_NAME(ImageSegmenterLiveStreamDelegate)
 @protocol MPPImageSegmenterLiveStreamDelegate <NSObject>
 
-@optional
+@required
 
 /**
  * This method notifies a delegate that the results of asynchronous segmentation of

From bfb68491af291e87a225b34f304431a38878e676 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 23 Jun 2023 20:13:29 +0530
Subject: [PATCH 100/106] Added copying of running mode in NSCopying
 implementation in iOS tasks

---
 .../ios/vision/face_detector/sources/MPPFaceDetectorOptions.m    | 1 +
 .../vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m    | 1 +
 .../vision/image_classifier/sources/MPPImageClassifierOptions.m  | 1 +
 .../vision/object_detector/sources/MPPObjectDetectorOptions.m    | 1 +
 4 files changed, 4 insertions(+)

diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m
index 7d990aa69..9ea57395e 100644
--- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m
+++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m
@@ -28,6 +28,7 @@
 - (id)copyWithZone:(NSZone *)zone {
   MPPFaceDetectorOptions *faceDetectorOptions = [super copyWithZone:zone];
 
+  faceDetectorOptions.runningMode = self.runningMode;
   faceDetectorOptions.minDetectionConfidence = self.minDetectionConfidence;
   faceDetectorOptions.minSuppressionThreshold = self.minSuppressionThreshold;
   faceDetectorOptions.faceDetectorLiveStreamDelegate = self.faceDetectorLiveStreamDelegate;
diff
--git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m index ebef092f0..47a25589e 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m @@ -33,6 +33,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPFaceLandmarkerOptions *faceLandmarkerOptions = [super copyWithZone:zone]; + faceDetectorOptions.runningMode = self.runningMode; faceLandmarkerOptions.numFaces = self.numFaces; faceLandmarkerOptions.minFaceDetectionConfidence = self.minFaceDetectionConfidence; faceLandmarkerOptions.minFacePresenceConfidence = self.minFacePresenceConfidence; diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m index 8d3815ff3..99f08d500 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m @@ -28,6 +28,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPImageClassifierOptions *imageClassifierOptions = [super copyWithZone:zone]; + imageClassifierOptions.runningMode = self.runningMode; imageClassifierOptions.scoreThreshold = self.scoreThreshold; imageClassifierOptions.maxResults = self.maxResults; imageClassifierOptions.categoryDenylist = self.categoryDenylist; diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m index b93a6b30b..bb4605cd8 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m @@ -28,6 +28,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPObjectDetectorOptions *objectDetectorOptions = [super copyWithZone:zone]; + objectDetectorOptions.runningMode = self.runningMode; objectDetectorOptions.scoreThreshold = self.scoreThreshold; objectDetectorOptions.maxResults = self.maxResults; objectDetectorOptions.categoryDenylist = self.categoryDenylist; From 3d79d582866af0d99c39e431595cc9232fe6bfea Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 23 Jun 2023 20:18:41 +0530 Subject: [PATCH 101/106] Updated variable name --- .../vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m index 47a25589e..3438ed8d3 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m @@ -33,7 +33,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPFaceLandmarkerOptions *faceLandmarkerOptions = [super copyWithZone:zone]; - faceDetectorOptions.runningMode = self.runningMode; + faceLandmarkerOptions.runningMode = self.runningMode; faceLandmarkerOptions.numFaces = self.numFaces; faceLandmarkerOptions.minFaceDetectionConfidence = self.minFaceDetectionConfidence; faceLandmarkerOptions.minFacePresenceConfidence = self.minFacePresenceConfidence; From 570880190bd823996b1938f78784c91b6def99c0 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 26 Jun 2023 01:49:56 -0700 Subject: [PATCH 102/106] Internal change 
for proto library outputs. PiperOrigin-RevId: 543368974 --- mediapipe/framework/tool/mediapipe_proto.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mediapipe/framework/tool/mediapipe_proto.bzl b/mediapipe/framework/tool/mediapipe_proto.bzl index 6e41d054e..142560ce5 100644 --- a/mediapipe/framework/tool/mediapipe_proto.bzl +++ b/mediapipe/framework/tool/mediapipe_proto.bzl @@ -50,6 +50,7 @@ def mediapipe_proto_library_impl( def_cc_proto = True, def_py_proto = True, def_java_lite_proto = True, + def_kt_lite_proto = True, def_objc_proto = True, def_java_proto = True, def_jspb_proto = True, @@ -72,6 +73,7 @@ def mediapipe_proto_library_impl( def_cc_proto: define the cc_proto_library target def_py_proto: define the py_proto_library target def_java_lite_proto: define the java_lite_proto_library target + def_kt_lite_proto: define the kt_lite_proto_library target def_objc_proto: define the objc_proto_library target def_java_proto: define the java_proto_library target def_jspb_proto: define the jspb_proto_library target @@ -255,6 +257,7 @@ def mediapipe_proto_library( def_cc_proto = True, def_py_proto = True, def_java_lite_proto = True, + def_kt_lite_proto = True, def_portable_proto = True, # @unused def_objc_proto = True, def_java_proto = True, @@ -281,6 +284,7 @@ def mediapipe_proto_library( def_cc_proto: define the cc_proto_library target def_py_proto: define the py_proto_library target def_java_lite_proto: define the java_lite_proto_library target + def_kt_lite_proto: define the kt_lite_proto_library target def_portable_proto: ignored since portable protos are gone def_objc_proto: define the objc_proto_library target def_java_proto: define the java_proto_library target @@ -304,6 +308,7 @@ def mediapipe_proto_library( def_cc_proto = def_cc_proto, def_py_proto = def_py_proto, def_java_lite_proto = def_java_lite_proto, + def_kt_lite_proto = def_kt_lite_proto, def_objc_proto = def_objc_proto, def_java_proto = def_java_proto, def_jspb_proto = def_jspb_proto, @@ -334,6 +339,7 @@ def mediapipe_proto_library( def_cc_proto = def_cc_proto, def_py_proto = def_py_proto, def_java_lite_proto = def_java_lite_proto, + def_kt_lite_proto = def_kt_lite_proto, def_objc_proto = def_objc_proto, def_java_proto = def_java_proto, def_jspb_proto = def_jspb_proto, From 9de1b2577f912bc51dc8a8b72a1230c2d68a49e4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 26 Jun 2023 12:18:25 -0700 Subject: [PATCH 103/106] Internal update PiperOrigin-RevId: 543508346 --- mediapipe/tasks/cc/core/BUILD | 1 + mediapipe/tasks/cc/core/base_options.cc | 46 +++++++++++++++++++- mediapipe/tasks/cc/core/base_options.h | 33 +++++++++++++- mediapipe/tasks/cc/core/base_options_test.cc | 43 ++++++++++++++++++ 4 files changed, 121 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/cc/core/BUILD b/mediapipe/tasks/cc/core/BUILD index dad9cdf1f..a3e44c536 100644 --- a/mediapipe/tasks/cc/core/BUILD +++ b/mediapipe/tasks/cc/core/BUILD @@ -29,6 +29,7 @@ cc_library( "//mediapipe/tasks/cc/core/proto:acceleration_cc_proto", "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "@com_google_absl//absl/log", "@com_google_absl//absl/memory", "@org_tensorflow//tensorflow/lite/core/api:op_resolver", "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", diff --git a/mediapipe/tasks/cc/core/base_options.cc b/mediapipe/tasks/cc/core/base_options.cc index a34c23168..b7987f982 100644 --- a/mediapipe/tasks/cc/core/base_options.cc +++ b/mediapipe/tasks/cc/core/base_options.cc 
@@ -17,15 +17,56 @@ limitations under the License.
 
 #include <memory>
 #include <string>
+#include <variant>
 
+#include "absl/log/log.h"
 #include "mediapipe/calculators/tensor/inference_calculator.pb.h"
 #include "mediapipe/tasks/cc/core/proto/acceleration.pb.h"
+#include "mediapipe/tasks/cc/core/proto/base_options.pb.h"
 #include "mediapipe/tasks/cc/core/proto/external_file.pb.h"
 
 namespace mediapipe {
 namespace tasks {
 namespace core {
 
+proto::Acceleration ConvertDelegateOptionsToAccelerationProto(
+    const BaseOptions::CpuOptions& options) {
+  proto::Acceleration acceleration_proto = proto::Acceleration();
+  acceleration_proto.mutable_tflite();
+  return acceleration_proto;
+}
+
+proto::Acceleration ConvertDelegateOptionsToAccelerationProto(
+    const BaseOptions::GpuOptions& options) {
+  proto::Acceleration acceleration_proto = proto::Acceleration();
+  auto* gpu = acceleration_proto.mutable_gpu();
+  gpu->set_use_advanced_gpu_api(true);
+  gpu->set_cached_kernel_path(options.cached_kernel_path);
+  gpu->set_serialized_model_dir(options.serialized_model_dir);
+  gpu->set_model_token(options.model_token);
+  return acceleration_proto;
+}
+
+template <typename T>
+void SetDelegateOptionsOrDie(const BaseOptions* base_options,
+                             proto::BaseOptions& base_options_proto) {
+  if (base_options->delegate_options.has_value()) {
+    if (!std::holds_alternative<T>(*base_options->delegate_options)) {
+      LOG(FATAL) << "Specified Delegate type does not match the provided "
+                    "delegate options.";
+    } else {
+      std::visit(
+          [&base_options_proto](const auto& delegate_options) {
+            proto::Acceleration acceleration_proto =
+                ConvertDelegateOptionsToAccelerationProto(delegate_options);
+            base_options_proto.mutable_acceleration()->Swap(
+                &acceleration_proto);
+          },
+          *base_options->delegate_options);
+    }
+  }
+}
+
 proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) {
   proto::BaseOptions base_options_proto;
   if (!base_options->model_asset_path.empty()) {
@@ -53,11 +94,15 @@ proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) {
   switch (base_options->delegate) {
     case BaseOptions::Delegate::CPU:
       base_options_proto.mutable_acceleration()->mutable_tflite();
+      SetDelegateOptionsOrDie<BaseOptions::CpuOptions>(base_options,
+                                                       base_options_proto);
       break;
     case BaseOptions::Delegate::GPU:
       base_options_proto.mutable_acceleration()
          ->mutable_gpu()
          ->set_use_advanced_gpu_api(true);
+      SetDelegateOptionsOrDie<BaseOptions::GpuOptions>(base_options,
+                                                       base_options_proto);
      break;
     case BaseOptions::Delegate::EDGETPU_NNAPI:
       base_options_proto.mutable_acceleration()
@@ -65,7 +110,6 @@ proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) {
          ->set_accelerator_name("google-edgetpu");
       break;
   }
-
   return base_options_proto;
 }
 
 }  // namespace core
diff --git a/mediapipe/tasks/cc/core/base_options.h b/mediapipe/tasks/cc/core/base_options.h
index 021aebbe5..738d71093 100644
--- a/mediapipe/tasks/cc/core/base_options.h
+++ b/mediapipe/tasks/cc/core/base_options.h
@@ -17,7 +17,9 @@ limitations under the License.
 #define MEDIAPIPE_TASKS_CC_CORE_BASE_OPTIONS_H_
 
 #include <memory>
+#include <optional>
 #include <string>
+#include <variant>
 
 #include "absl/memory/memory.h"
 #include "mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.h"
@@ -38,7 +40,8 @@ struct BaseOptions {
   std::string model_asset_path = "";
 
   // The delegate to run MediaPipe. If the delegate is not set, the default
-  // delegate CPU is used.
+  // delegate CPU is used. Use `delegate_options` to configure advanced
+  // features of the selected delegate.
   enum Delegate {
     CPU = 0,
     GPU = 1,
@@ -48,6 +51,30 @@ struct BaseOptions {
 
   Delegate delegate = CPU;
 
+  // Options for CPU.
+  struct CpuOptions {};
+
+  // Options for GPU.
+  struct GpuOptions {
+    // Load pre-compiled serialized binary cache to accelerate init process.
+    // Only available on Android. Kernel caching will only be enabled if this
+    // path is set. NOTE: binary cache usage may be skipped if valid serialized
+    // model, specified by "serialized_model_dir", exists.
+    std::string cached_kernel_path;
+
+    // A dir to load from and save to a pre-compiled serialized model used to
+    // accelerate init process.
+    // NOTE: serialized model takes precedence over binary cache
+    // specified by "cached_kernel_path", which still can be used if
+    // serialized model is invalid or missing.
+    std::string serialized_model_dir;
+
+    // Unique token identifying the model. Used in conjunction with
+    // "serialized_model_dir". It is the caller's responsibility to ensure
+    // there is no clash of the tokens.
+    std::string model_token;
+  };
+
   // The file descriptor to a file opened with open(2), with optional additional
   // offset and length information.
   struct FileDescriptorMeta {
@@ -67,6 +94,10 @@ struct BaseOptions {
   // built-in Ops.
   std::unique_ptr<tflite::OpResolver> op_resolver =
       absl::make_unique<MediaPipeBuiltinOpResolver>();
+
+  // Options for the chosen delegate. If not set, the default delegate options
+  // is used.
+  std::optional<std::variant<CpuOptions, GpuOptions>> delegate_options;
 };
 
 // Converts a BaseOptions to a BaseOptionsProto.
diff --git a/mediapipe/tasks/cc/core/base_options_test.cc b/mediapipe/tasks/cc/core/base_options_test.cc
index dce95050d..af9a55a37 100644
--- a/mediapipe/tasks/cc/core/base_options_test.cc
+++ b/mediapipe/tasks/cc/core/base_options_test.cc
@@ -1,6 +1,9 @@
 #include "mediapipe/tasks/cc/core/base_options.h"
 
+#include <memory>
+#include <optional>
 #include <string>
+#include <variant>
 
 #include "mediapipe/calculators/tensor/inference_calculator.pb.h"
 #include "mediapipe/framework/port/gmock.h"
@@ -11,6 +14,8 @@
 constexpr char kTestModelBundlePath[] =
     "mediapipe/tasks/testdata/core/dummy_gesture_recognizer.task";
 
+constexpr char kCachedModelDir[] = "/data/local/tmp";
+constexpr char kModelToken[] = "dummy_model_token";
 
 namespace mediapipe {
 namespace tasks {
@@ -40,6 +45,44 @@ TEST(BaseOptionsTest, ConvertBaseOptionsToProtoWithAcceleration) {
   EXPECT_EQ(proto.acceleration().nnapi().accelerator_name(), "google-edgetpu");
 }
 
+TEST(DelegateOptionsTest, SucceedCpuOptions) {
+  BaseOptions base_options;
+  base_options.delegate = BaseOptions::Delegate::CPU;
+  BaseOptions::CpuOptions cpu_options;
+  base_options.delegate_options = cpu_options;
+  proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options);
+  EXPECT_TRUE(proto.acceleration().has_tflite());
+  ASSERT_FALSE(proto.acceleration().has_gpu());
+}
+
+TEST(DelegateOptionsTest, SucceedGpuOptions) {
+  BaseOptions base_options;
+  base_options.delegate = BaseOptions::Delegate::GPU;
+  BaseOptions::GpuOptions gpu_options;
+  gpu_options.cached_kernel_path = kCachedModelDir;
+  gpu_options.model_token = kModelToken;
+  base_options.delegate_options = gpu_options;
+  proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options);
+  ASSERT_TRUE(proto.acceleration().has_gpu());
+  ASSERT_FALSE(proto.acceleration().has_tflite());
+  EXPECT_TRUE(proto.acceleration().gpu().use_advanced_gpu_api());
+  EXPECT_EQ(proto.acceleration().gpu().cached_kernel_path(), kCachedModelDir);
+  EXPECT_EQ(proto.acceleration().gpu().model_token(), kModelToken);
+}
+
+TEST(DelegateOptionsDeathTest, FailWrongDelegateOptionsType) {
+  BaseOptions base_options;
+  base_options.delegate = BaseOptions::Delegate::CPU;
+  BaseOptions::GpuOptions gpu_options;
+  gpu_options.cached_kernel_path = kCachedModelDir;
+  gpu_options.model_token = kModelToken;
+  base_options.delegate_options = gpu_options;
+  ASSERT_DEATH(
+      { proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options); },
+      "Specified Delegate type does not match the provided "
+      "delegate options.");
+}
+
 }  // namespace
 }  // namespace core
 }  // namespace tasks
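Putting the pieces of this change together, the new `delegate_options` field is used roughly as follows. This is a minimal sketch assuming only the declarations added in this patch; the model path and token are placeholders.

#include "mediapipe/tasks/cc/core/base_options.h"
#include "mediapipe/tasks/cc/core/proto/base_options.pb.h"

namespace {
using ::mediapipe::tasks::core::BaseOptions;
using ::mediapipe::tasks::core::ConvertBaseOptionsToProto;
}  // namespace

mediapipe::tasks::core::proto::BaseOptions MakeGpuBaseOptionsProto() {
  BaseOptions base_options;
  base_options.model_asset_path = "model.task";  // placeholder path
  base_options.delegate = BaseOptions::Delegate::GPU;

  BaseOptions::GpuOptions gpu_options;
  gpu_options.cached_kernel_path = "/data/local/tmp";    // Android-only kernel cache
  gpu_options.serialized_model_dir = "/data/local/tmp";  // takes precedence over the cache
  gpu_options.model_token = "my_model";                  // placeholder token
  // The alternative stored here must match `delegate`.
  base_options.delegate_options = gpu_options;

  return ConvertBaseOptionsToProto(&base_options);
}

Setting a `GpuOptions` while `delegate` is `CPU` (or vice versa) terminates via `LOG(FATAL)` in `SetDelegateOptionsOrDie`, which is exactly what the `DelegateOptionsDeathTest` above verifies.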
From c8c5f3d062f441eb37738c789a3550e7280ebefe Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 26 Jun 2023 18:54:48 -0700
Subject: [PATCH 104/106] Internal change

PiperOrigin-RevId: 543602625
---
 mediapipe/tasks/cc/text/text_classifier/BUILD              | 3 +--
 .../tasks/cc/text/text_classifier/text_classifier_test.cc | 3 ---
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/mediapipe/tasks/cc/text/text_classifier/BUILD b/mediapipe/tasks/cc/text/text_classifier/BUILD
index 28b5d709e..121b4f5e6 100644
--- a/mediapipe/tasks/cc/text/text_classifier/BUILD
+++ b/mediapipe/tasks/cc/text/text_classifier/BUILD
@@ -86,10 +86,9 @@ cc_test(
         "//mediapipe/tasks/cc/components/containers:classification_result",
         "@com_google_absl//absl/flags:flag",
         "@com_google_absl//absl/status",
-        "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
         "@com_google_absl//absl/strings:cord",
-        "@com_google_sentencepiece//src:sentencepiece_processor",
+        "@com_google_sentencepiece//src:sentencepiece_processor",  # fixdeps: keep
         "@org_tensorflow//tensorflow/lite:test_util",
     ],
 )
diff --git a/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc b/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc
index e10bd53f3..dfb78c07f 100644
--- a/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc
+++ b/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc
@@ -15,8 +15,6 @@ limitations under the License.
 
 #include "mediapipe/tasks/cc/text/text_classifier/text_classifier.h"
 
-#include
-#include
 #include <memory>
 #include <sstream>
 #include <string>
@@ -24,7 +22,6 @@ limitations under the License.
 
 #include "absl/flags/flag.h"
 #include "absl/status/status.h"
-#include "absl/status/statusor.h"
 #include "absl/strings/cord.h"
 #include "absl/strings/str_cat.h"
 #include "absl/strings/string_view.h"

From bed624f3b6f7ad5d25b5474c516561c537f10199 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 27 Jun 2023 01:59:59 -0700
Subject: [PATCH 105/106] Show the recently added warning only once when
 WaitUntilIdle is called on a graph with source nodes.

Otherwise it is very spammy, as it is shown every frame. Moreover, display
the names of the sources, so the warning is more actionable.

PiperOrigin-RevId: 543676454
---
 mediapipe/framework/calculator_graph.cc | 17 +++++++++++++++--
 mediapipe/framework/calculator_graph.h  |  3 +++
 2 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc
index 2a2088c6b..33ca41fb8 100644
--- a/mediapipe/framework/calculator_graph.cc
+++ b/mediapipe/framework/calculator_graph.cc
@@ -840,9 +840,12 @@ absl::Status CalculatorGraph::PrepareForRun(
 
 absl::Status CalculatorGraph::WaitUntilIdle() {
   if (has_sources_) {
-    LOG(WARNING) << "WaitUntilIdle called on a graph with source nodes, which "
-                    "is not fully supported at the moment.";
+    LOG_FIRST_N(WARNING, 1)
+        << "WaitUntilIdle called on a graph with source nodes, which "
+           "is not fully supported at the moment. Source nodes: "
Source nodes: " + << ListSourceNodes(); } + MP_RETURN_IF_ERROR(scheduler_.WaitUntilIdle()); VLOG(2) << "Scheduler idle."; absl::Status status = absl::OkStatus(); @@ -1372,6 +1375,16 @@ const OutputStreamManager* CalculatorGraph::FindOutputStreamManager( .get()[validated_graph_->OutputStreamIndex(name)]; } +std::string CalculatorGraph::ListSourceNodes() const { + std::vector sources; + for (auto& node : nodes_) { + if (node->IsSource()) { + sources.push_back(node->DebugName()); + } + } + return absl::StrJoin(sources, ", "); +} + namespace { void PrintTimingToInfo(const std::string& label, int64_t timer_value) { const int64_t total_seconds = timer_value / 1000000ll; diff --git a/mediapipe/framework/calculator_graph.h b/mediapipe/framework/calculator_graph.h index 748d2fb32..00c922a3b 100644 --- a/mediapipe/framework/calculator_graph.h +++ b/mediapipe/framework/calculator_graph.h @@ -597,6 +597,9 @@ class CalculatorGraph { // status before taking any action. void UpdateThrottledNodes(InputStreamManager* stream, bool* stream_was_full); + // Returns a comma-separated list of source nodes. + std::string ListSourceNodes() const; + #if !MEDIAPIPE_DISABLE_GPU // Owns the legacy GpuSharedData if we need to create one for backwards // compatibility. From 1ee55d1f1bf682f7d06fc450330439d6cb1b5974 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 27 Jun 2023 18:02:59 -0700 Subject: [PATCH 106/106] Support ExBert training and option to select between AdamW and LAMB optimizers for BertClassifier PiperOrigin-RevId: 543905014 --- .../python/text/text_classifier/BUILD | 16 ++-- .../python/text/text_classifier/__init__.py | 14 ++-- .../text/text_classifier/hyperparameters.py | 54 +++++++++++++ .../python/text/text_classifier/model_spec.py | 47 +++++++++-- .../text/text_classifier/model_spec_test.py | 15 ++-- .../text/text_classifier/text_classifier.py | 81 +++++++++++++------ .../text_classifier/text_classifier_demo.py | 10 ++- .../text_classifier_options.py | 4 +- .../text_classifier/text_classifier_test.py | 23 +++--- 9 files changed, 202 insertions(+), 62 deletions(-) create mode 100644 mediapipe/model_maker/python/text/text_classifier/hyperparameters.py diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 26412d2cb..64ace4ba0 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -31,11 +31,11 @@ py_library( visibility = ["//visibility:public"], deps = [ ":dataset", + ":hyperparameters", ":model_options", ":model_spec", ":text_classifier", ":text_classifier_options", - "//mediapipe/model_maker/python/core:hyperparameters", ], ) @@ -45,12 +45,18 @@ py_library( deps = ["//mediapipe/model_maker/python/text/core:bert_model_options"], ) +py_library( + name = "hyperparameters", + srcs = ["hyperparameters.py"], + deps = ["//mediapipe/model_maker/python/core:hyperparameters"], +) + py_library( name = "model_spec", srcs = ["model_spec.py"], deps = [ + ":hyperparameters", ":model_options", - "//mediapipe/model_maker/python/core:hyperparameters", "//mediapipe/model_maker/python/core/utils:file_util", "//mediapipe/model_maker/python/text/core:bert_model_spec", ], @@ -61,9 +67,9 @@ py_test( srcs = ["model_spec_test.py"], tags = ["requires-net:external"], deps = [ + ":hyperparameters", ":model_options", ":model_spec", - "//mediapipe/model_maker/python/core:hyperparameters", ], ) @@ -100,9 +106,9 @@ py_library( name = "text_classifier_options", srcs = 
["text_classifier_options.py"], deps = [ + ":hyperparameters", ":model_options", ":model_spec", - "//mediapipe/model_maker/python/core:hyperparameters", ], ) @@ -111,11 +117,11 @@ py_library( srcs = ["text_classifier.py"], deps = [ ":dataset", + ":hyperparameters", ":model_options", ":model_spec", ":preprocessor", ":text_classifier_options", - "//mediapipe/model_maker/python/core:hyperparameters", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", "//mediapipe/model_maker/python/core/utils:metrics", diff --git a/mediapipe/model_maker/python/text/text_classifier/__init__.py b/mediapipe/model_maker/python/text/text_classifier/__init__.py index 4df3a771e..7eb0f9259 100644 --- a/mediapipe/model_maker/python/text/text_classifier/__init__.py +++ b/mediapipe/model_maker/python/text/text_classifier/__init__.py @@ -13,19 +13,23 @@ # limitations under the License. """MediaPipe Public Python API for Text Classifier.""" -from mediapipe.model_maker.python.core import hyperparameters from mediapipe.model_maker.python.text.text_classifier import dataset +from mediapipe.model_maker.python.text.text_classifier import hyperparameters from mediapipe.model_maker.python.text.text_classifier import model_options from mediapipe.model_maker.python.text.text_classifier import model_spec from mediapipe.model_maker.python.text.text_classifier import text_classifier from mediapipe.model_maker.python.text.text_classifier import text_classifier_options -HParams = hyperparameters.BaseHParams + +AverageWordEmbeddingHParams = hyperparameters.AverageWordEmbeddingHParams +AverageWordEmbeddingModelOptions = ( + model_options.AverageWordEmbeddingModelOptions +) +BertOptimizer = hyperparameters.BertOptimizer +BertHParams = hyperparameters.BertHParams +BertModelOptions = model_options.BertModelOptions CSVParams = dataset.CSVParameters Dataset = dataset.Dataset -AverageWordEmbeddingModelOptions = ( - model_options.AverageWordEmbeddingModelOptions) -BertModelOptions = model_options.BertModelOptions SupportedModels = model_spec.SupportedModels TextClassifier = text_classifier.TextClassifier TextClassifierOptions = text_classifier_options.TextClassifierOptions diff --git a/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py new file mode 100644 index 000000000..ae0a9a627 --- /dev/null +++ b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py @@ -0,0 +1,54 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Hyperparameters for training object detection models.""" + +import dataclasses +import enum +from typing import Union + +from mediapipe.model_maker.python.core import hyperparameters as hp + + +@dataclasses.dataclass +class AverageWordEmbeddingHParams(hp.BaseHParams): + """The hyperparameters for an AverageWordEmbeddingClassifier.""" + + +@enum.unique +class BertOptimizer(enum.Enum): + """Supported Optimizers for Bert Text Classifier.""" + + ADAMW = "adamw" + LAMB = "lamb" + + +@dataclasses.dataclass +class BertHParams(hp.BaseHParams): + """The hyperparameters for a Bert Classifier. + + Attributes: + learning_rate: Learning rate to use for gradient descent training. + batch_size: Batch size for training. + epochs: Number of training iterations over the dataset. + optimizer: Optimizer to use for training. Only supported values are "adamw" + and "lamb". + """ + + learning_rate: float = 3e-5 + batch_size: int = 48 + epochs: int = 2 + optimizer: BertOptimizer = BertOptimizer.ADAMW + + +HParams = Union[BertHParams, AverageWordEmbeddingHParams] diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index e947f8c18..452e22679 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -17,13 +17,11 @@ import dataclasses import enum import functools -from mediapipe.model_maker.python.core import hyperparameters as hp from mediapipe.model_maker.python.core.utils import file_util from mediapipe.model_maker.python.text.core import bert_model_spec +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as mo -# BERT-based text classifier spec inherited from BertModelSpec -BertClassifierSpec = bert_model_spec.BertModelSpec MOBILEBERT_TINY_FILES = file_util.DownloadedFiles( 'text_classifier/mobilebert_tiny', @@ -31,6 +29,12 @@ MOBILEBERT_TINY_FILES = file_util.DownloadedFiles( is_folder=True, ) +EXBERT_FILES = file_util.DownloadedFiles( + 'text_classifier/exbert', + 'https://storage.googleapis.com/mediapipe-assets/exbert.tar.gz', + is_folder=True, +) + @dataclasses.dataclass class AverageWordEmbeddingClassifierSpec: @@ -43,27 +47,53 @@ class AverageWordEmbeddingClassifierSpec: """ # `learning_rate` is unused for the average word embedding model - hparams: hp.BaseHParams = hp.BaseHParams( - epochs=10, batch_size=32, learning_rate=0) + hparams: hp.AverageWordEmbeddingHParams = hp.AverageWordEmbeddingHParams( + epochs=10, batch_size=32, learning_rate=0 + ) model_options: mo.AverageWordEmbeddingModelOptions = ( mo.AverageWordEmbeddingModelOptions()) name: str = 'AverageWordEmbedding' - average_word_embedding_classifier_spec = functools.partial( AverageWordEmbeddingClassifierSpec) + +@dataclasses.dataclass +class BertClassifierSpec(bert_model_spec.BertModelSpec): + """Specification for a Bert classifier model. + + Only overrides the hparams attribute since the rest of the attributes are + inherited from the BertModelSpec. 
+ """ + + hparams: hp.BertHParams = hp.BertHParams() + + mobilebert_classifier_spec = functools.partial( BertClassifierSpec, downloaded_files=MOBILEBERT_TINY_FILES, - hparams=hp.BaseHParams( + hparams=hp.BertHParams( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), name='MobileBert', tflite_input_name={ 'ids': 'serving_default_input_1:0', - 'mask': 'serving_default_input_3:0', 'segment_ids': 'serving_default_input_2:0', + 'mask': 'serving_default_input_3:0', + }, +) + +exbert_classifier_spec = functools.partial( + BertClassifierSpec, + downloaded_files=EXBERT_FILES, + hparams=hp.BertHParams( + epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' + ), + name='ExBert', + tflite_input_name={ + 'ids': 'serving_default_input_1:0', + 'segment_ids': 'serving_default_input_2:0', + 'mask': 'serving_default_input_3:0', }, ) @@ -73,3 +103,4 @@ class SupportedModels(enum.Enum): """Predefined text classifier model specs supported by Model Maker.""" AVERAGE_WORD_EMBEDDING_CLASSIFIER = average_word_embedding_classifier_spec MOBILEBERT_CLASSIFIER = mobilebert_classifier_spec + EXBERT_CLASSIFIER = exbert_classifier_spec diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py index a8d40558c..7c45a2675 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py @@ -19,7 +19,7 @@ from unittest import mock as unittest_mock import tensorflow as tf -from mediapipe.model_maker.python.core import hyperparameters as hp +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as classifier_model_options from mediapipe.model_maker.python.text.text_classifier import model_spec as ms @@ -57,11 +57,13 @@ class ModelSpecTest(tf.test.TestCase): seq_len=128, do_fine_tuning=True, dropout_rate=0.1)) self.assertEqual( model_spec_obj.hparams, - hp.BaseHParams( + hp.BertHParams( epochs=3, batch_size=48, learning_rate=3e-5, - distribution_strategy='off')) + distribution_strategy='off', + ), + ) def test_predefined_average_word_embedding_spec(self): model_spec_obj = ( @@ -78,7 +80,7 @@ class ModelSpecTest(tf.test.TestCase): dropout_rate=0.2)) self.assertEqual( model_spec_obj.hparams, - hp.BaseHParams( + hp.AverageWordEmbeddingHParams( epochs=10, batch_size=32, learning_rate=0, @@ -101,7 +103,7 @@ class ModelSpecTest(tf.test.TestCase): custom_bert_classifier_options) def test_custom_average_word_embedding_spec(self): - custom_hparams = hp.BaseHParams( + custom_hparams = hp.AverageWordEmbeddingHParams( learning_rate=0.4, batch_size=64, epochs=10, @@ -110,7 +112,8 @@ class ModelSpecTest(tf.test.TestCase): export_dir='foo/bar', distribution_strategy='mirrored', num_gpus=3, - tpu='tpu/address') + tpu='tpu/address', + ) custom_average_word_embedding_model_options = ( classifier_model_options.AverageWordEmbeddingModelOptions( seq_len=512, diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 59369931d..6c8adc82c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -19,15 +19,16 @@ import tempfile from typing import Any, Optional, Sequence, Tuple import tensorflow as tf +from 
tensorflow_addons import optimizers as tfa_optimizers
 import tensorflow_hub as hub
 
-from mediapipe.model_maker.python.core import hyperparameters as hp
 from mediapipe.model_maker.python.core.data import dataset as ds
 from mediapipe.model_maker.python.core.tasks import classifier
 from mediapipe.model_maker.python.core.utils import metrics
 from mediapipe.model_maker.python.core.utils import model_util
 from mediapipe.model_maker.python.core.utils import quantization
 from mediapipe.model_maker.python.text.text_classifier import dataset as text_ds
+from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp
 from mediapipe.model_maker.python.text.text_classifier import model_options as mo
 from mediapipe.model_maker.python.text.text_classifier import model_spec as ms
 from mediapipe.model_maker.python.text.text_classifier import preprocessor
@@ -55,22 +56,26 @@ def _validate(options: text_classifier_options.TextClassifierOptions):
           ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER)):
     raise ValueError("Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER,"
                      f" got {options.supported_model}")
-  if (isinstance(options.model_options, mo.BertModelOptions) and
-      (options.supported_model != ms.SupportedModels.MOBILEBERT_CLASSIFIER)):
+  if isinstance(options.model_options, mo.BertModelOptions) and (
+      options.supported_model != ms.SupportedModels.MOBILEBERT_CLASSIFIER
+      and options.supported_model != ms.SupportedModels.EXBERT_CLASSIFIER
+  ):
     raise ValueError(
-        f"Expected MOBILEBERT_CLASSIFIER, got {options.supported_model}")
+        "Expected a Bert Classifier (MobileBERT or EXBERT), got "
+        f"{options.supported_model}"
+    )
 
 
 class TextClassifier(classifier.Classifier):
   """API for creating and training a text classification model."""
 
-  def __init__(self, model_spec: Any, hparams: hp.BaseHParams,
-               label_names: Sequence[str]):
+  def __init__(
+      self, model_spec: Any, label_names: Sequence[str], shuffle: bool
+  ):
     super().__init__(
-        model_spec=model_spec, label_names=label_names, shuffle=hparams.shuffle)
+        model_spec=model_spec, label_names=label_names, shuffle=shuffle
+    )
     self._model_spec = model_spec
-    self._hparams = hparams
-    self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir)
     self._text_preprocessor: preprocessor.TextClassifierPreprocessor = None
 
   @classmethod
@@ -107,7 +112,10 @@ class TextClassifier(classifier.Classifier):
     if options.hparams is None:
       options.hparams = options.supported_model.value().hparams
 
-    if options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER:
+    if (
+        options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER
+        or options.supported_model == ms.SupportedModels.EXBERT_CLASSIFIER
+    ):
       text_classifier = (
           _BertClassifier.create_bert_classifier(train_data, validation_data,
                                                  options,
@@ -225,11 +233,17 @@ class _AverageWordEmbeddingClassifier(TextClassifier):
 
   _DELIM_REGEX_PATTERN = r"[^\w\']+"
 
-  def __init__(self, model_spec: ms.AverageWordEmbeddingClassifierSpec,
-               model_options: mo.AverageWordEmbeddingModelOptions,
-               hparams: hp.BaseHParams, label_names: Sequence[str]):
-    super().__init__(model_spec, hparams, label_names)
+  def __init__(
+      self,
+      model_spec: ms.AverageWordEmbeddingClassifierSpec,
+      model_options: mo.AverageWordEmbeddingModelOptions,
+      hparams: hp.AverageWordEmbeddingHParams,
+      label_names: Sequence[str],
+  ):
+    super().__init__(model_spec, label_names, hparams.shuffle)
     self._model_options = model_options
+    self._hparams = hparams
+    self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir)
self._loss_function = "sparse_categorical_crossentropy" self._metric_functions = [ "accuracy", @@ -344,10 +358,16 @@ class _BertClassifier(TextClassifier): _INITIALIZER_RANGE = 0.02 - def __init__(self, model_spec: ms.BertClassifierSpec, - model_options: mo.BertModelOptions, hparams: hp.BaseHParams, - label_names: Sequence[str]): - super().__init__(model_spec, hparams, label_names) + def __init__( + self, + model_spec: ms.BertClassifierSpec, + model_options: mo.BertModelOptions, + hparams: hp.BertHParams, + label_names: Sequence[str], + ): + super().__init__(model_spec, label_names, hparams.shuffle) + self._hparams = hparams + self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) self._model_options = model_options with self._hparams.get_strategy().scope(): self._loss_function = tf.keras.losses.SparseCategoricalCrossentropy() @@ -480,11 +500,26 @@ class _BertClassifier(TextClassifier): initial_learning_rate=initial_lr, decay_schedule_fn=lr_schedule, warmup_steps=warmup_steps) - - self._optimizer = tf.keras.optimizers.experimental.AdamW( - lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0) - self._optimizer.exclude_from_weight_decay( - var_names=["LayerNorm", "layer_norm", "bias"]) + if self._hparams.optimizer == hp.BertOptimizer.ADAMW: + self._optimizer = tf.keras.optimizers.experimental.AdamW( + lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0 + ) + self._optimizer.exclude_from_weight_decay( + var_names=["LayerNorm", "layer_norm", "bias"] + ) + elif self._hparams.optimizer == hp.BertOptimizer.LAMB: + self._optimizer = tfa_optimizers.LAMB( + lr_schedule, + weight_decay_rate=0.01, + epsilon=1e-6, + exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], + global_clipnorm=1.0, + ) + else: + raise ValueError( + "BertHParams.optimizer must be set to ADAM or " + f"LAMB. Got {self._hparams.optimizer}." + ) def _save_vocab(self, vocab_filepath: str): tf.io.gfile.copy( diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py index c3d1711dc..934bb1c4b 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py @@ -66,14 +66,16 @@ def run(data_dir, quantization_config = None if (supported_model == text_classifier.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER): - hparams = text_classifier.HParams( - epochs=10, batch_size=32, learning_rate=0, export_dir=export_dir) + hparams = text_classifier.AverageWordEmbeddingHParams( + epochs=10, batch_size=32, learning_rate=0, export_dir=export_dir + ) # Warning: This takes extremely long to run on CPU elif ( supported_model == text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER): quantization_config = quantization.QuantizationConfig.for_dynamic() - hparams = text_classifier.HParams( - epochs=3, batch_size=48, learning_rate=3e-5, export_dir=export_dir) + hparams = text_classifier.BertHParams( + epochs=3, batch_size=48, learning_rate=3e-5, export_dir=export_dir + ) # Fine-tunes the model. 
options = text_classifier.TextClassifierOptions( diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py index c62fb27bf..b61731f16 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py @@ -16,7 +16,7 @@ import dataclasses from typing import Optional -from mediapipe.model_maker.python.core import hyperparameters as hp +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as mo from mediapipe.model_maker.python.text.text_classifier import model_spec as ms @@ -34,5 +34,5 @@ class TextClassifierOptions: architecture of the `supported_model`. """ supported_model: ms.SupportedModels - hparams: Optional[hp.BaseHParams] = None + hparams: Optional[hp.HParams] = None model_options: Optional[mo.TextClassifierModelOptions] = None diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py index 34830c9ff..e6057059c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py @@ -66,12 +66,14 @@ class TextClassifierTest(tf.test.TestCase): def test_create_and_train_average_word_embedding_model(self): train_data, validation_data = self._get_data() - options = ( - text_classifier.TextClassifierOptions( - supported_model=(text_classifier.SupportedModels - .AVERAGE_WORD_EMBEDDING_CLASSIFIER), - hparams=text_classifier.HParams( - epochs=1, batch_size=1, learning_rate=0))) + options = text_classifier.TextClassifierOptions( + supported_model=( + text_classifier.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER + ), + hparams=text_classifier.AverageWordEmbeddingHParams( + epochs=1, batch_size=1, learning_rate=0 + ), + ) average_word_embedding_classifier = ( text_classifier.TextClassifier.create(train_data, validation_data, options)) @@ -103,12 +105,15 @@ class TextClassifierTest(tf.test.TestCase): options = text_classifier.TextClassifierOptions( supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, model_options=text_classifier.BertModelOptions( - do_fine_tuning=False, seq_len=2), - hparams=text_classifier.HParams( + do_fine_tuning=False, seq_len=2 + ), + hparams=text_classifier.BertHParams( epochs=1, batch_size=1, learning_rate=3e-5, - distribution_strategy='off')) + distribution_strategy='off', + ), + ) bert_classifier = text_classifier.TextClassifier.create( train_data, validation_data, options)